Implement code changes to enhance functionality and improve performance

This commit is contained in:
pacnpal
2025-08-23 18:42:09 -04:00
parent d504d41de2
commit 02c7cbd1cd
4 changed files with 317 additions and 0 deletions

View File

@@ -0,0 +1,43 @@
# A generic Alembic configuration file.

[alembic]
# path to migration scripts
script_location = alembic
# The database URL is now set dynamically by ConPort's run_migrations function.
# sqlalchemy.url = sqlite:///your_database.db
# ... other Alembic settings ...

# --- Logging configuration (standard Python logging.config.fileConfig format,
# --- consumed by env.py at migration time) ---
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

# Root logger: warnings and above, to the console handler.
[logger_root]
level = WARN
handlers = console
qualname =

# SQLAlchemy engine logging; set to INFO to see emitted SQL.
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

# Alembic's own progress messages.
[logger_alembic]
level = INFO
handlers =
qualname = alembic

# Single console handler on stderr so migration output stays separate
# from any SQL written to stdout in offline mode.
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@@ -0,0 +1,79 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line prevents the need to have a separate logging config file.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE(review): left as None, so `alembic revision --autogenerate` cannot
# diff against models — migration scripts must be written by hand.
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL rather than an
    Engine, so no DBAPI needs to be available; each migration's SQL
    is emitted to the script output instead of being executed.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] config section and executes
    the migrations over a real database connection. NullPool ensures
    the connection is discarded as soon as the run finishes.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic chooses offline vs. online mode per invocation
# (e.g. `alembic upgrade head --sql` runs offline).
_runner = run_migrations_offline if context.is_offline_mode() else run_migrations_online
_runner()

View File

@@ -0,0 +1,195 @@
"""Initial schema
Revision ID: 20250617
Revises:
Create Date: 2025-06-17 15:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
import json
# revision identifiers, used by Alembic.
revision = '20250617'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the initial ConPort schema.

    Builds the context/decision/progress tables, seeds the singleton
    context rows (id=1), and creates FTS5 external-content search
    tables plus triggers that keep them in sync with `decisions` and
    `custom_data`. SQLite-specific (FTS5, raw CREATE TRIGGER).
    """
    # ### commands auto-generated by Alembic - please adjust! ###
    # Singleton table: the seed below inserts its only expected row (id=1).
    op.create_table('active_context',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # Versioned snapshots of active_context changes.
    op.create_table('active_context_history',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('version', sa.Integer(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('change_source', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Generic typed links between any two items (type + id pairs are
    # free-form strings, not foreign keys).
    op.create_table('context_links',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('workspace_id', sa.String(length=1024), nullable=False),
        sa.Column('source_item_type', sa.String(length=255), nullable=False),
        sa.Column('source_item_id', sa.String(length=255), nullable=False),
        sa.Column('target_item_type', sa.String(length=255), nullable=False),
        sa.Column('target_item_id', sa.String(length=255), nullable=False),
        sa.Column('relationship_type', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_context_links_source_item_id'), 'context_links', ['source_item_id'], unique=False)
    op.create_index(op.f('ix_context_links_source_item_type'), 'context_links', ['source_item_type'], unique=False)
    op.create_index(op.f('ix_context_links_target_item_id'), 'context_links', ['target_item_id'], unique=False)
    op.create_index(op.f('ix_context_links_target_item_type'), 'context_links', ['target_item_type'], unique=False)
    # Key/value store, unique per (category, key).
    op.create_table('custom_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('category', sa.String(length=255), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('category', 'key')
    )
    op.create_table('decisions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('summary', sa.Text(), nullable=False),
        sa.Column('rationale', sa.Text(), nullable=True),
        sa.Column('implementation_details', sa.Text(), nullable=True),
        sa.Column('tags', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Singleton table, seeded below (id=1), mirroring active_context.
    op.create_table('product_context',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('product_context_history',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('version', sa.Integer(), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('change_source', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Self-referencing hierarchy; deleting a parent orphans children
    # gracefully via SET NULL rather than cascading.
    op.create_table('progress_entries',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['progress_entries.id'], ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('system_patterns',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('tags', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    # Seed initial data: both context singletons start as empty JSON objects.
    op.execute("INSERT INTO product_context (id, content) VALUES (1, '{}')")
    op.execute("INSERT INTO active_context (id, content) VALUES (1, '{}')")
    # Create FTS5 virtual table for decisions.
    # External-content mode (content=/content_rowid=): the FTS index stores
    # no copy of the text and must be maintained by the triggers below.
    op.execute('''
        CREATE VIRTUAL TABLE decisions_fts USING fts5(
            summary,
            rationale,
            implementation_details,
            tags,
            content="decisions",
            content_rowid="id"
        );
    ''')
    # Create triggers to keep the FTS table in sync with the decisions table.
    op.execute('''
        CREATE TRIGGER decisions_after_insert AFTER INSERT ON decisions
        BEGIN
            INSERT INTO decisions_fts (rowid, summary, rationale, implementation_details, tags)
            VALUES (new.id, new.summary, new.rationale, new.implementation_details, new.tags);
        END;
    ''')
    # FTS5 'delete' command form: inserting the table's own name removes
    # the old row's index entries (required for external-content tables).
    op.execute('''
        CREATE TRIGGER decisions_after_delete AFTER DELETE ON decisions
        BEGIN
            INSERT INTO decisions_fts (decisions_fts, rowid, summary, rationale, implementation_details, tags)
            VALUES ('delete', old.id, old.summary, old.rationale, old.implementation_details, old.tags);
        END;
    ''')
    # Update = delete old index entries, then insert the new values.
    op.execute('''
        CREATE TRIGGER decisions_after_update AFTER UPDATE ON decisions
        BEGIN
            INSERT INTO decisions_fts (decisions_fts, rowid, summary, rationale, implementation_details, tags)
            VALUES ('delete', old.id, old.summary, old.rationale, old.implementation_details, old.tags);
            INSERT INTO decisions_fts (rowid, summary, rationale, implementation_details, tags)
            VALUES (new.id, new.summary, new.rationale, new.implementation_details, new.tags);
        END;
    ''')
    # Create FTS5 virtual table for custom_data.
    # NOTE: the FTS column is named value_text while the content table's
    # column is value — the triggers below map between the two.
    op.execute('''
        CREATE VIRTUAL TABLE custom_data_fts USING fts5(
            category,
            key,
            value_text,
            content="custom_data",
            content_rowid="id"
        );
    ''')
    # Create triggers for custom_data_fts (same pattern as decisions_fts).
    op.execute('''
        CREATE TRIGGER custom_data_after_insert AFTER INSERT ON custom_data
        BEGIN
            INSERT INTO custom_data_fts (rowid, category, key, value_text)
            VALUES (new.id, new.category, new.key, new.value);
        END;
    ''')
    op.execute('''
        CREATE TRIGGER custom_data_after_delete AFTER DELETE ON custom_data
        BEGIN
            INSERT INTO custom_data_fts (custom_data_fts, rowid, category, key, value_text)
            VALUES ('delete', old.id, old.category, old.key, old.value);
        END;
    ''')
    op.execute('''
        CREATE TRIGGER custom_data_after_update AFTER UPDATE ON custom_data
        BEGIN
            INSERT INTO custom_data_fts (custom_data_fts, rowid, category, key, value_text)
            VALUES ('delete', old.id, old.category, old.key, old.value);
            INSERT INTO custom_data_fts (rowid, category, key, value_text)
            VALUES (new.id, new.category, new.key, new.value);
        END;
    ''')
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert the initial schema.

    Fix over the auto-generated version: also drop the FTS5 virtual
    tables and their sync triggers. SQLite does not remove a virtual
    table when its external-content table is dropped, so without
    this the downgrade left `decisions_fts` and `custom_data_fts`
    (and their shadow tables) behind.
    """
    # ### commands auto-generated by Alembic - please adjust! ###
    # Triggers are removed automatically when their base table is dropped,
    # but dropping them explicitly keeps this downgrade order-independent.
    for trigger_name in (
        'custom_data_after_update',
        'custom_data_after_delete',
        'custom_data_after_insert',
        'decisions_after_update',
        'decisions_after_delete',
        'decisions_after_insert',
    ):
        op.execute(f'DROP TRIGGER IF EXISTS {trigger_name}')
    # FTS5 virtual tables were created via raw SQL in upgrade(), so they
    # must be dropped via raw SQL here as well.
    op.execute('DROP TABLE IF EXISTS custom_data_fts')
    op.execute('DROP TABLE IF EXISTS decisions_fts')
    op.drop_table('system_patterns')
    op.drop_table('progress_entries')
    op.drop_table('product_context_history')
    op.drop_table('product_context')
    op.drop_table('decisions')
    op.drop_table('custom_data')
    op.drop_index(op.f('ix_context_links_target_item_type'), table_name='context_links')
    op.drop_index(op.f('ix_context_links_target_item_id'), table_name='context_links')
    op.drop_index(op.f('ix_context_links_source_item_type'), table_name='context_links')
    op.drop_index(op.f('ix_context_links_source_item_id'), table_name='context_links')
    op.drop_table('context_links')
    op.drop_table('active_context_history')
    op.drop_table('active_context')
    # ### end Alembic commands ###

BIN
context_portal/context.db Normal file

Binary file not shown.