feat(01-foundation-01): Alembic migrations with RLS and tenant isolation tests
- alembic.ini + migrations/env.py: async SQLAlchemy migration setup using asyncpg
- migrations/versions/001_initial_schema.py: creates tenants, agents, channel_connections, portal_users
- ENABLE + FORCE ROW LEVEL SECURITY on agents and channel_connections
- RLS policy: tenant_id = current_setting('app.current_tenant', TRUE)::uuid
- konstruct_app role created with SELECT/INSERT/UPDATE/DELETE on all tables
- packages/shared/shared/rls.py: idempotent configure_rls_hook, UUID-sanitized SET LOCAL
- tests/conftest.py: test_db_name (session-scoped), db_engine + db_session as konstruct_app
- tests/unit/test_normalize.py: 11 tests for KonstructMessage Slack normalization (CHAN-01)
- tests/unit/test_tenant_resolution.py: 7 tests for workspace_id → tenant resolution (TNNT-02)
- tests/unit/test_redis_namespacing.py: 15 tests for Redis key namespace isolation (TNNT-03)
- tests/integration/test_tenant_isolation.py: 7 tests proving RLS tenant isolation (TNNT-01)
- tenant_b cannot see tenant_a's agents or channel_connections
- FORCE ROW LEVEL SECURITY verified via pg_class.relforcerowsecurity
This commit is contained in:
103
migrations/env.py
Normal file
103
migrations/env.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""
Alembic migration environment — async SQLAlchemy configuration.

Uses asyncpg driver with asyncio migration pattern required for SQLAlchemy 2.0.
Runs migrations as the postgres admin user (DATABASE_ADMIN_URL) so it can:
- CREATE ROLE konstruct_app
- ENABLE ROW LEVEL SECURITY
- FORCE ROW LEVEL SECURITY
- CREATE POLICY

Application code always uses DATABASE_URL (konstruct_app role).
"""

from __future__ import annotations

import asyncio
import os
import sys
from logging.config import fileConfig

from alembic import context
from sqlalchemy.ext.asyncio import create_async_engine

# ---------------------------------------------------------------------------
# Make sure packages/shared is importable when running `alembic upgrade head`
# from the repo root.
# ---------------------------------------------------------------------------
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

# Imported after the sys.path tweak above — hence the E402 suppressions.
from shared.models.tenant import Base  # noqa: E402 # type: ignore[import]

# Import auth model to register it with Base.metadata
# (side-effect import: its tables must be attached before autogenerate runs).
import shared.models.auth  # noqa: E402, F401 # type: ignore[import]
|
||||
|
||||
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# Skipped when alembic is driven programmatically without an .ini file.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# ---------------------------------------------------------------------------
# Metadata for autogenerate support
# ---------------------------------------------------------------------------
target_metadata = Base.metadata

# ---------------------------------------------------------------------------
# Use DATABASE_ADMIN_URL if set (for CI / production migrations),
# otherwise fall back to alembic.ini sqlalchemy.url.
# Migrations require the admin role (CREATE ROLE / ALTER TABLE ... ROW LEVEL
# SECURITY); application code connects separately as konstruct_app.
# ---------------------------------------------------------------------------
database_url = os.environ.get("DATABASE_ADMIN_URL") or config.get_main_option("sqlalchemy.url")
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Emit migration SQL without connecting to a database.

    Configures the Alembic context with only a URL (no Engine), so DDL is
    rendered as a script — literal binds, named paramstyle — which is what
    `alembic upgrade --sql` uses to generate reviewable SQL files.
    """
    offline_options = {
        "url": database_url,
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
async def run_async_migrations() -> None:
    """Drive the migration run through an asyncpg engine.

    Alembic's migration machinery is synchronous, so it is bridged into the
    async engine via ``Connection.run_sync`` — the required pattern for
    SQLAlchemy 2.0 + asyncpg.
    """
    engine = create_async_engine(database_url, echo=False)

    async with engine.connect() as conn:
        await conn.run_sync(do_run_migrations)

    await engine.dispose()
|
||||
|
||||
|
||||
def do_run_migrations(connection: object) -> None:
    """Configure the Alembic context on *connection* and run the migrations.

    Invoked synchronously from inside the async engine via ``run_sync``.
    """
    context.configure(  # type: ignore[arg-type]
        connection=connection,
        target_metadata=target_metadata,
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode against a live database.

    A fresh event loop is created per invocation; all async work is
    confined to ``run_async_migrations``.
    """
    migration_coro = run_async_migrations()
    asyncio.run(migration_coro)
|
||||
|
||||
|
||||
# Entry-point dispatch: Alembic enables offline mode for `alembic ... --sql`
# (render SQL without a DB); otherwise execute against a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
28
migrations/script.py.mako
Normal file
28
migrations/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
## Alembic revision template — rendered by `alembic revision`.
## Template variables (message, up_revision, down_revision, create_date,
## imports, upgrades, downgrades) are supplied by Alembic; `##` lines are
## mako comments and do not appear in the generated file.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from __future__ import annotations

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
|
||||
195
migrations/versions/001_initial_schema.py
Normal file
195
migrations/versions/001_initial_schema.py
Normal file
@@ -0,0 +1,195 @@
|
||||
"""Initial schema: tenants, agents, channel_connections, portal_users with RLS

Revision ID: 001
Revises:
Create Date: 2026-03-23

This migration:
1. Creates the konstruct_app application role (non-superuser)
2. Creates all four tables matching the SQLAlchemy models
3. Applies Row Level Security (RLS) with FORCE ROW LEVEL SECURITY to
   tenant-scoped tables (agents, channel_connections)
4. Creates RLS policies that scope rows to app.current_tenant session variable
5. Grants appropriate permissions to konstruct_app role

CRITICAL: FORCE ROW LEVEL SECURITY is applied to agents and channel_connections.
This means even the table owner cannot bypass RLS. The integration test
`test_tenant_isolation.py` must verify this is in effect.
"""

from __future__ import annotations

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import UUID

# revision identifiers, used by Alembic.
revision: str = "001"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

# All valid channel types — kept in sync with ChannelType StrEnum in message.py.
# Single source of truth for both the channel_type_enum ENUM and the
# chk_channel_type CHECK constraint created in upgrade().
_CHANNEL_TYPES = ("slack", "whatsapp", "mattermost", "rocketchat", "teams", "telegram", "signal")
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial schema: role, tables, indexes, and RLS policies.

    Steps (order matters — role before GRANTs, tenants before FK children):
      1. konstruct_app role (idempotent)
      2. channel_type_enum type
      3. tenants          (no RLS — platform admin needs full visibility)
      4. agents           (RLS + FORCE)
      5. channel_connections (RLS + FORCE)
      6. portal_users     (no RLS — auth happens before tenant context)
      7. GRANTs to konstruct_app
    """
    # SQL-quoted value list rendered once from _CHANNEL_TYPES so the ENUM
    # and the CHECK constraint below cannot drift apart. (Previously the
    # ENUM values were hard-coded a second time, and the CHECK relied on
    # Python tuple repr() happening to be valid SQL — which breaks for a
    # single-element tuple: "('slack',)" is not valid SQL.)
    channel_values_sql = ", ".join(f"'{v}'" for v in _CHANNEL_TYPES)

    # -------------------------------------------------------------------------
    # 1. Create application role (idempotent — CREATE ROLE has no IF NOT EXISTS,
    #    so wrap it in a DO block guarded by a pg_roles lookup)
    # -------------------------------------------------------------------------
    op.execute("""
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = 'konstruct_app') THEN
                CREATE ROLE konstruct_app WITH LOGIN PASSWORD 'konstruct_dev';
            END IF;
        END
        $$
    """)

    op.execute("GRANT USAGE ON SCHEMA public TO konstruct_app")

    # -------------------------------------------------------------------------
    # 2. Create channel_type ENUM (using raw SQL to avoid SQLAlchemy auto-emit)
    #    We use op.execute with raw DDL so SQLAlchemy does NOT auto-emit
    #    a second CREATE TYPE statement in create_table below.
    # -------------------------------------------------------------------------
    op.execute(f"CREATE TYPE channel_type_enum AS ENUM ({channel_values_sql})")

    # -------------------------------------------------------------------------
    # 3. Create tenants table (no RLS — platform admin needs full visibility)
    # -------------------------------------------------------------------------
    op.create_table(
        "tenants",
        sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")),
        sa.Column("name", sa.String(255), nullable=False, unique=True),
        sa.Column("slug", sa.String(100), nullable=False, unique=True),
        sa.Column("settings", sa.JSON, nullable=False, server_default=sa.text("'{}'")),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("NOW()")),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("NOW()"),
        ),
    )
    op.create_index("ix_tenants_slug", "tenants", ["slug"])

    # -------------------------------------------------------------------------
    # 4. Create agents table with RLS
    # -------------------------------------------------------------------------
    op.create_table(
        "agents",
        sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")),
        sa.Column("tenant_id", UUID(as_uuid=True), sa.ForeignKey("tenants.id", ondelete="CASCADE"), nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("role", sa.String(255), nullable=False),
        sa.Column("persona", sa.Text, nullable=False, server_default=sa.text("''")),
        sa.Column("system_prompt", sa.Text, nullable=False, server_default=sa.text("''")),
        sa.Column("model_preference", sa.String(50), nullable=False, server_default=sa.text("'quality'")),
        sa.Column("tool_assignments", sa.JSON, nullable=False, server_default=sa.text("'[]'")),
        sa.Column("escalation_rules", sa.JSON, nullable=False, server_default=sa.text("'[]'")),
        sa.Column("is_active", sa.Boolean, nullable=False, server_default=sa.text("TRUE")),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("NOW()")),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("NOW()"),
        ),
    )
    op.create_index("ix_agents_tenant_id", "agents", ["tenant_id"])

    # Apply RLS to agents — FORCE ensures even the table owner cannot bypass.
    # current_setting(..., TRUE) returns NULL when app.current_tenant is unset,
    # so a session without tenant context sees zero rows rather than erroring.
    op.execute("ALTER TABLE agents ENABLE ROW LEVEL SECURITY")
    op.execute("ALTER TABLE agents FORCE ROW LEVEL SECURITY")
    op.execute("""
        CREATE POLICY tenant_isolation ON agents
        USING (tenant_id = current_setting('app.current_tenant', TRUE)::uuid)
    """)

    # -------------------------------------------------------------------------
    # 5. Create channel_connections table with RLS
    #    Use sa.Text for channel_type column — cast to enum_type in app code.
    #    The channel_type_enum was created above via raw DDL; valid values are
    #    enforced here via the CHECK constraint below.
    # -------------------------------------------------------------------------
    op.create_table(
        "channel_connections",
        sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")),
        sa.Column("tenant_id", UUID(as_uuid=True), sa.ForeignKey("tenants.id", ondelete="CASCADE"), nullable=False),
        sa.Column(
            "channel_type",
            sa.Text,  # Stored as text, constrained by CHECK to valid enum values
            nullable=False,
        ),
        sa.Column("workspace_id", sa.String(255), nullable=False),
        sa.Column("config", sa.JSON, nullable=False, server_default=sa.text("'{}'")),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("NOW()")),
        sa.UniqueConstraint("channel_type", "workspace_id", name="uq_channel_workspace"),
    )
    op.create_index("ix_channel_connections_tenant_id", "channel_connections", ["tenant_id"])

    # Add CHECK constraint to enforce valid channel types (same rendered list
    # as the ENUM above — trusted module constant, not user input).
    op.execute(
        "ALTER TABLE channel_connections ADD CONSTRAINT chk_channel_type "
        f"CHECK (channel_type IN ({channel_values_sql}))"
    )

    # Apply RLS to channel_connections (same policy shape as agents)
    op.execute("ALTER TABLE channel_connections ENABLE ROW LEVEL SECURITY")
    op.execute("ALTER TABLE channel_connections FORCE ROW LEVEL SECURITY")
    op.execute("""
        CREATE POLICY tenant_isolation ON channel_connections
        USING (tenant_id = current_setting('app.current_tenant', TRUE)::uuid)
    """)

    # -------------------------------------------------------------------------
    # 6. Create portal_users table (no RLS — auth happens before tenant context)
    # -------------------------------------------------------------------------
    op.create_table(
        "portal_users",
        sa.Column("id", UUID(as_uuid=True), primary_key=True, server_default=sa.text("gen_random_uuid()")),
        sa.Column("email", sa.String(255), nullable=False, unique=True),
        sa.Column("hashed_password", sa.String(255), nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("is_admin", sa.Boolean, nullable=False, server_default=sa.text("FALSE")),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False, server_default=sa.text("NOW()")),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("NOW()"),
        ),
    )
    op.create_index("ix_portal_users_email", "portal_users", ["email"])

    # -------------------------------------------------------------------------
    # 7. Grant table permissions to konstruct_app role
    #    (RLS policies above still restrict which ROWS the role can touch)
    # -------------------------------------------------------------------------
    op.execute("GRANT SELECT, INSERT, UPDATE, DELETE ON tenants TO konstruct_app")
    op.execute("GRANT SELECT, INSERT, UPDATE, DELETE ON agents TO konstruct_app")
    op.execute("GRANT SELECT, INSERT, UPDATE, DELETE ON channel_connections TO konstruct_app")
    op.execute("GRANT SELECT, INSERT, UPDATE, DELETE ON portal_users TO konstruct_app")
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Tear down the initial schema in reverse dependency order.

    The konstruct_app role itself is intentionally NOT dropped: it may be
    shared across databases, and upgrade() creates it idempotently, so
    leaving it in place is safe and avoids breaking concurrent consumers.
    """
    # Revoke table grants, then the schema-level USAGE granted in upgrade()
    # (previously USAGE was left behind, making the downgrade asymmetric).
    op.execute("REVOKE ALL ON portal_users FROM konstruct_app")
    op.execute("REVOKE ALL ON channel_connections FROM konstruct_app")
    op.execute("REVOKE ALL ON agents FROM konstruct_app")
    op.execute("REVOKE ALL ON tenants FROM konstruct_app")
    op.execute("REVOKE USAGE ON SCHEMA public FROM konstruct_app")

    # Drop tables children-first (FKs reference tenants); RLS policies are
    # dropped automatically along with their tables.
    op.drop_table("portal_users")
    op.drop_table("channel_connections")
    op.drop_table("agents")
    op.drop_table("tenants")

    # Drop enum type (created via raw DDL in upgrade)
    op.execute("DROP TYPE IF EXISTS channel_type_enum")
|
||||
Reference in New Issue
Block a user