mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2025-12-20 14:11:09 -05:00
feat: complete monorepo structure with frontend and shared resources
- Add complete backend/ directory with full Django application
- Add frontend/ directory with Vite + TypeScript setup ready for Next.js
- Add comprehensive shared/ directory with:
  - Complete documentation and memory-bank archives
  - Media files and avatars (letters, park/ride images)
  - Deployment scripts and automation tools
  - Shared types and utilities
- Add architecture/ directory with migration guides
- Configure pnpm workspace for monorepo development
- Update .gitignore to exclude .django_tailwind_cli/ build artifacts
- Preserve all historical documentation in shared/docs/memory-bank/
- Set up proper structure for full-stack development with shared resources
This commit is contained in:
43
backend/apps/context_portal/alembic.ini
Normal file
43
backend/apps/context_portal/alembic.ini
Normal file
@@ -0,0 +1,43 @@
|
||||
|
||||
# A generic Alembic configuration file.

[alembic]
# path to migration scripts
script_location = alembic

# The database URL is now set dynamically by ConPort's run_migrations function.
# sqlalchemy.url = sqlite:///your_database.db

# ... other Alembic settings ...

# Logging configuration (standard Python logging.config.fileConfig format,
# consumed by env.py).
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

# Root logger: warnings and above go to the console handler.
[logger_root]
level = WARN
handlers = console
qualname =

# SQLAlchemy engine logging; raise to INFO to echo executed SQL.
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

# Alembic's own migration-progress messages.
[logger_alembic]
level = INFO
handlers =
qualname = alembic

# Single console handler writing to stderr.
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
|
||||
76
backend/apps/context_portal/alembic/env.py
Normal file
76
backend/apps/context_portal/alembic/env.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line prevents the need to have a separate logging config file.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE: migrations in this project are written by hand, so no MetaData object
# is wired up for autogenerate support.
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
|
||||
def run_migrations_offline() -> None:
    """Emit migration SQL without a live database connection ('offline' mode).

    Only the sqlalchemy.url value from the Alembic config is required: the
    context is configured with a bare URL rather than an Engine, so no DBAPI
    needs to be installed. Calls to context.execute() are rendered to the
    script output instead of being executed.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Apply migrations through a real database connection ('online' mode).

    Builds an Engine from the config section of the .ini file (NullPool,
    since a one-shot migration run has no use for connection pooling), then
    binds the Alembic context to a single connection for the duration of
    the run.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Alembic sets offline mode when invoked with --sql (render SQL to stdout);
# otherwise run the migrations against a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
@@ -0,0 +1,247 @@
|
||||
"""Initial schema
|
||||
|
||||
Revision ID: 20250617
|
||||
Revises:
|
||||
Create Date: 2025-06-17 15:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import json
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "20250617"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial ConPort schema.

    Creates the base tables (singleton product/active context rows plus their
    history tables, decisions, progress entries, system patterns, custom data,
    and cross-item links), seeds the two singleton context rows, then builds
    SQLite FTS5 virtual tables for full-text search over decisions and custom
    data, together with the insert/update/delete triggers that keep them in
    sync with their content tables.
    """
    # ### commands auto-generated by Alembic - please adjust! ###
    # Singleton table: a single row (id=1, seeded below) holds the current
    # active context as a JSON text blob.
    op.create_table(
        "active_context",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # Append-only version history of active_context changes.
    op.create_table(
        "active_context_history",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("version", sa.Integer(), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.Column("change_source", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Typed links between arbitrary items (decision -> progress entry, etc.);
    # source/target are (type, id) string pairs rather than foreign keys.
    op.create_table(
        "context_links",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("workspace_id", sa.String(length=1024), nullable=False),
        sa.Column("source_item_type", sa.String(length=255), nullable=False),
        sa.Column("source_item_id", sa.String(length=255), nullable=False),
        sa.Column("target_item_type", sa.String(length=255), nullable=False),
        sa.Column("target_item_id", sa.String(length=255), nullable=False),
        sa.Column("relationship_type", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column(
            "timestamp",
            sa.DateTime(),
            # DB-side default so raw inserts also get a timestamp.
            server_default=sa.text("(CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Non-unique indexes supporting lookups from either end of a link.
    op.create_index(
        op.f("ix_context_links_source_item_id"),
        "context_links",
        ["source_item_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_context_links_source_item_type"),
        "context_links",
        ["source_item_type"],
        unique=False,
    )
    op.create_index(
        op.f("ix_context_links_target_item_id"),
        "context_links",
        ["target_item_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_context_links_target_item_type"),
        "context_links",
        ["target_item_type"],
        unique=False,
    )
    # Free-form key/value storage, unique per (category, key).
    op.create_table(
        "custom_data",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("category", sa.String(length=255), nullable=False),
        sa.Column("key", sa.String(length=255), nullable=False),
        sa.Column("value", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("category", "key"),
    )
    op.create_table(
        "decisions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("summary", sa.Text(), nullable=False),
        sa.Column("rationale", sa.Text(), nullable=True),
        sa.Column("implementation_details", sa.Text(), nullable=True),
        sa.Column("tags", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Singleton table mirroring active_context (seeded below).
    op.create_table(
        "product_context",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "product_context_history",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("version", sa.Integer(), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.Column("change_source", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Self-referential hierarchy; deleting a parent orphans children
    # (parent_id set to NULL) rather than cascading.
    op.create_table(
        "progress_entries",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("description", sa.Text(), nullable=False),
        sa.Column("parent_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["parent_id"], ["progress_entries.id"], ondelete="SET NULL"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "system_patterns",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("tags", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )

    # Seed initial data
    # Empty-JSON singleton rows so readers can always assume id=1 exists.
    op.execute("INSERT INTO product_context (id, content) VALUES (1, '{}')")
    op.execute("INSERT INTO active_context (id, content) VALUES (1, '{}')")

    # Create FTS5 virtual table for decisions
    # External-content table: rows are stored in `decisions`; the FTS index
    # is maintained via the triggers below.
    op.execute(
        """
        CREATE VIRTUAL TABLE decisions_fts USING fts5(
            summary,
            rationale,
            implementation_details,
            tags,
            content="decisions",
            content_rowid="id"
        );
        """
    )

    # Create triggers to keep the FTS table in sync with the decisions table
    op.execute(
        """
        CREATE TRIGGER decisions_after_insert AFTER INSERT ON decisions
        BEGIN
            INSERT INTO decisions_fts (rowid, summary, rationale, implementation_details, tags)
            VALUES (new.id, new.summary, new.rationale, new.implementation_details, new.tags);
        END;
        """
    )
    # FTS5 'delete' command: removes the old row's entry from the index.
    op.execute(
        """
        CREATE TRIGGER decisions_after_delete AFTER DELETE ON decisions
        BEGIN
            INSERT INTO decisions_fts (decisions_fts, rowid, summary, rationale, implementation_details, tags)
            VALUES ('delete', old.id, old.summary, old.rationale, old.implementation_details, old.tags);
        END;
        """
    )
    # Update = delete old index entry, then insert the new one.
    op.execute(
        """
        CREATE TRIGGER decisions_after_update AFTER UPDATE ON decisions
        BEGIN
            INSERT INTO decisions_fts (decisions_fts, rowid, summary, rationale, implementation_details, tags)
            VALUES ('delete', old.id, old.summary, old.rationale, old.implementation_details, old.tags);
            INSERT INTO decisions_fts (rowid, summary, rationale, implementation_details, tags)
            VALUES (new.id, new.summary, new.rationale, new.implementation_details, new.tags);
        END;
        """
    )

    # Create FTS5 virtual table for custom_data
    # NOTE(review): the FTS column is named value_text but the content table's
    # column is value; with content="custom_data" FTS5 resolves columns by
    # name, so reads of value_text from the content table may fail — confirm
    # against ConPort's query usage before relying on this column.
    op.execute(
        """
        CREATE VIRTUAL TABLE custom_data_fts USING fts5(
            category,
            key,
            value_text,
            content="custom_data",
            content_rowid="id"
        );
        """
    )

    # Create triggers for custom_data_fts
    op.execute(
        """
        CREATE TRIGGER custom_data_after_insert AFTER INSERT ON custom_data
        BEGIN
            INSERT INTO custom_data_fts (rowid, category, key, value_text)
            VALUES (new.id, new.category, new.key, new.value);
        END;
        """
    )
    op.execute(
        """
        CREATE TRIGGER custom_data_after_delete AFTER DELETE ON custom_data
        BEGIN
            INSERT INTO custom_data_fts (custom_data_fts, rowid, category, key, value_text)
            VALUES ('delete', old.id, old.category, old.key, old.value);
        END;
        """
    )
    op.execute(
        """
        CREATE TRIGGER custom_data_after_update AFTER UPDATE ON custom_data
        BEGIN
            INSERT INTO custom_data_fts (custom_data_fts, rowid, category, key, value_text)
            VALUES ('delete', old.id, old.category, old.key, old.value);
            INSERT INTO custom_data_fts (rowid, category, key, value_text)
            VALUES (new.id, new.category, new.key, new.value);
        END;
        """
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Revert the initial schema.

    Drops the FTS5 sync triggers and virtual tables first, then every base
    table created by upgrade(). The auto-generated body dropped only the base
    tables, which would have left the decisions_fts and custom_data_fts
    virtual tables orphaned after a downgrade; triggers are also dropped
    explicitly (with IF EXISTS) so the order of operations never matters.
    """
    for trigger in (
        "decisions_after_insert",
        "decisions_after_delete",
        "decisions_after_update",
        "custom_data_after_insert",
        "custom_data_after_delete",
        "custom_data_after_update",
    ):
        op.execute(f"DROP TRIGGER IF EXISTS {trigger}")
    op.execute("DROP TABLE IF EXISTS decisions_fts")
    op.execute("DROP TABLE IF EXISTS custom_data_fts")

    # ### commands auto-generated by Alembic - please adjust! ###
    op.drop_table("system_patterns")
    op.drop_table("progress_entries")
    op.drop_table("product_context_history")
    op.drop_table("product_context")
    op.drop_table("decisions")
    op.drop_table("custom_data")
    op.drop_index(op.f("ix_context_links_target_item_type"), table_name="context_links")
    op.drop_index(op.f("ix_context_links_target_item_id"), table_name="context_links")
    op.drop_index(op.f("ix_context_links_source_item_type"), table_name="context_links")
    op.drop_index(op.f("ix_context_links_source_item_id"), table_name="context_links")
    op.drop_table("context_links")
    op.drop_table("active_context_history")
    op.drop_table("active_context")
    # ### end Alembic commands ###
|
||||
BIN
backend/apps/context_portal/context.db
Normal file
BIN
backend/apps/context_portal/context.db
Normal file
Binary file not shown.
Reference in New Issue
Block a user