fix: runtime deployment fixes for Docker Compose stack

- Add .gitignore for __pycache__, node_modules, .playwright-mcp
- Add CLAUDE.md project instructions
- docker-compose: remove host port exposure for internal services;
  remove the Ollama container (use the host install); add a CORS origin;
  bake NEXT_PUBLIC_API_URL at build time; run alembic migrations on
  gateway startup; pre-install CPU-only torch
- gateway: add CORS middleware; degrade gracefully when no Slack bot
  token is configured; add a None guard on slack_handler (see the
  sketch below)
- gateway pyproject: add aiohttp dependency for slack-bolt async
- llm-pool pyproject: install litellm from GitHub (removed from PyPI),
  enable hatch direct references
- portal: enable standalone output in next.config.ts
- Remove orphaned migration 003_phase2_audit_kb.py (renamed to 004)
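A rough sketch of the gateway wiring described above. Module paths, env var names (CORS_ORIGINS, SLACK_BOT_TOKEN, SLACK_SIGNING_SECRET), and the /slack/events route are illustrative assumptions, not the exact konstruct-gateway code:

```python
# Sketch: CORS middleware plus optional Slack handler with a None guard.
# Env var names and the route path are illustrative, not the repo's exact code.
import logging
import os

from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware

logger = logging.getLogger("gateway")
app = FastAPI()

# CORS: the portal is served from a different origin (port 3000) than the
# API (port 8001), so the browser needs an explicit allow-list entry.
app.add_middleware(
    CORSMiddleware,
    allow_origins=os.environ.get("CORS_ORIGINS", "http://100.64.0.10:3000").split(","),
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Graceful degradation: only construct the Slack handler when a bot token
# is configured. slack-bolt's AsyncApp is what pulls in aiohttp.
slack_handler = None
if os.environ.get("SLACK_BOT_TOKEN"):
    from slack_bolt.async_app import AsyncApp
    from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler

    slack_handler = AsyncSlackRequestHandler(
        AsyncApp(
            token=os.environ["SLACK_BOT_TOKEN"],
            signing_secret=os.environ.get("SLACK_SIGNING_SECRET", ""),
        )
    )
else:
    logger.warning("SLACK_BOT_TOKEN not set; Slack integration disabled")


@app.post("/slack/events")
async def slack_events(request: Request):
    # None guard: answer 503 instead of raising AttributeError when the
    # gateway is running without Slack.
    if slack_handler is None:
        raise HTTPException(status_code=503, detail="Slack integration not configured")
    return await slack_handler.handle(request)
```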

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
commit 0e0ea5fb66
parent d936bcf361
2026-03-24 12:26:34 -06:00
9 changed files with 694 additions and 293 deletions
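
Of the runtime changes, the gateway entrypoint is the one that changes startup ordering: migrations now ship with the image and run before the server binds. The same sequencing expressed in Python rather than `sh -c` would look roughly like this (a sketch only; it assumes alembic.ini and migrations/ sit in the working directory, as the COPY lines in the diff below arrange, and that the project's env.py can be invoked programmatically under the async driver):

```python
# Sketch: "migrate, then serve", equivalent to the compose CMD
#   sh -c "alembic upgrade head && uvicorn gateway.main:app --host 0.0.0.0 --port 8001"
from alembic import command
from alembic.config import Config


def run_migrations() -> None:
    cfg = Config("alembic.ini")   # baked into the image next to migrations/
    command.upgrade(cfg, "head")  # no-op when the schema is already current


if __name__ == "__main__":
    import uvicorn

    run_migrations()
    uvicorn.run("gateway.main:app", host="0.0.0.0", port=8001)
```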

docker-compose.yml

@@ -1,5 +1,3 @@
-version: "3.9"
-
 networks:
   konstruct-net:
     driver: bridge
@@ -7,7 +5,6 @@ networks:
 volumes:
   postgres_data:
   redis_data:
-  ollama_data:
 
 services:
   postgres:
@@ -20,8 +17,6 @@ services:
     volumes:
       - postgres_data:/var/lib/postgresql/data
       - ./scripts/init-db.sh:/docker-entrypoint-initdb.d/init-db.sh:ro
-    ports:
-      - "5432:5432"
     networks:
       - konstruct-net
     healthcheck:
@@ -36,8 +31,6 @@ services:
     command: redis-server --save 60 1 --loglevel warning
     volumes:
       - redis_data:/data
-    ports:
-      - "6379:6379"
     networks:
       - konstruct-net
     healthcheck:
@@ -46,24 +39,7 @@
       timeout: 5s
       retries: 10
 
-  ollama:
-    image: ollama/ollama:latest
-    container_name: konstruct-ollama
-    volumes:
-      - ollama_data:/root/.ollama
-    ports:
-      - "11434:11434"
-    networks:
-      - konstruct-net
-    deploy:
-      resources:
-        reservations:
-          devices:
-            - driver: nvidia
-              count: all
-              capabilities: [gpu]
-    # Service starts even if no GPU is available — GPU config is optional
-    restart: unless-stopped
+  # Ollama runs on the host (port 11434) — containers access it via host.docker.internal
 
   llm-pool:
     build:
@@ -71,26 +47,26 @@ services:
       dockerfile_inline: |
         FROM python:3.12-slim
         WORKDIR /app
+        RUN apt-get update -qq && apt-get install -y -qq git curl > /dev/null 2>&1
         RUN pip install uv
         COPY pyproject.toml ./
         COPY packages/shared ./packages/shared
         COPY packages/llm-pool ./packages/llm-pool
-        RUN uv pip install --system -e packages/shared -e packages/llm-pool
+        RUN uv pip install --system torch --index-url https://download.pytorch.org/whl/cpu
+        RUN uv pip install --system -e packages/shared -e "packages/llm-pool"
         CMD ["uvicorn", "llm_pool.main:app", "--host", "0.0.0.0", "--port", "8004"]
     container_name: konstruct-llm-pool
-    ports:
-      - "8004:8004"
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     networks:
       - konstruct-net
     depends_on:
-      ollama:
-        condition: service_started
       redis:
         condition: service_healthy
     environment:
       - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
       - OPENAI_API_KEY=${OPENAI_API_KEY:-}
-      - OLLAMA_BASE_URL=http://ollama:11434
+      - OLLAMA_BASE_URL=http://host.docker.internal:11434
       - REDIS_URL=redis://redis:6379/0
       - LOG_LEVEL=INFO
     restart: unless-stopped
@@ -109,6 +85,7 @@ services:
         COPY package.json package-lock.json* ./
         RUN npm ci --production=false
         COPY . .
+        ENV NEXT_PUBLIC_API_URL=http://100.64.0.10:8001
         RUN npm run build
         FROM node:22-alpine AS runner
         WORKDIR /app
@@ -126,9 +103,9 @@ services:
     environment:
       - NODE_ENV=production
       - API_URL=http://gateway:8001
-      - NEXT_PUBLIC_API_URL=http://localhost:8001
+      - NEXT_PUBLIC_API_URL=http://100.64.0.10:8001
       - AUTH_SECRET=${AUTH_SECRET:-insecure-dev-secret-change-in-production}
-      - AUTH_URL=http://localhost:3000
+      - AUTH_URL=http://100.64.0.10:3000
     restart: unless-stopped
     healthcheck:
       test: ["CMD-SHELL", "wget -q --spider http://localhost:3000 || exit 1"]
@@ -142,14 +119,18 @@ services:
       dockerfile_inline: |
         FROM python:3.12-slim
         WORKDIR /app
+        RUN apt-get update -qq && apt-get install -y -qq git curl > /dev/null 2>&1
         RUN pip install uv
         COPY pyproject.toml ./
         COPY packages/shared ./packages/shared
         COPY packages/router ./packages/router
         COPY packages/gateway ./packages/gateway
         COPY packages/orchestrator ./packages/orchestrator
+        COPY migrations ./migrations
+        COPY alembic.ini ./
+        RUN uv pip install --system torch --index-url https://download.pytorch.org/whl/cpu
         RUN uv pip install --system -e packages/shared -e packages/router -e packages/gateway -e packages/orchestrator
-        CMD ["uvicorn", "gateway.main:app", "--host", "0.0.0.0", "--port", "8001"]
+        CMD ["sh", "-c", "alembic upgrade head && uvicorn gateway.main:app --host 0.0.0.0 --port 8001"]
     container_name: konstruct-gateway
     ports:
       - "8001:8001"
@@ -164,6 +145,7 @@ services:
         condition: service_started
     environment:
       - DATABASE_URL=postgresql+asyncpg://konstruct_app:konstruct_dev@postgres:5432/konstruct
+      - DATABASE_ADMIN_URL=postgresql+asyncpg://postgres:postgres_dev@postgres:5432/konstruct
       - REDIS_URL=redis://redis:6379/0
       - CELERY_BROKER_URL=redis://redis:6379/1
      - CELERY_RESULT_BACKEND=redis://redis:6379/2
@@ -184,13 +166,19 @@ services:
       dockerfile_inline: |
         FROM python:3.12-slim
         WORKDIR /app
+        RUN apt-get update -qq && apt-get install -y -qq git curl > /dev/null 2>&1
         RUN pip install uv
         COPY pyproject.toml ./
         COPY packages/shared ./packages/shared
+        COPY packages/router ./packages/router
+        COPY packages/gateway ./packages/gateway
         COPY packages/orchestrator ./packages/orchestrator
-        RUN uv pip install --system -e packages/shared -e packages/orchestrator
+        RUN uv pip install --system torch --index-url https://download.pytorch.org/whl/cpu
+        RUN uv pip install --system -e packages/shared -e packages/router -e packages/gateway -e packages/orchestrator
         CMD ["celery", "-A", "orchestrator.main", "worker", "--loglevel=info"]
     container_name: konstruct-celery-worker
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     networks:
       - konstruct-net
     depends_on:
@@ -208,6 +196,6 @@ services:
       - LLM_POOL_URL=http://llm-pool:8004
       - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
       - OPENAI_API_KEY=${OPENAI_API_KEY:-}
-      - OLLAMA_BASE_URL=http://ollama:11434
+      - OLLAMA_BASE_URL=http://host.docker.internal:11434
       - LOG_LEVEL=INFO
     restart: unless-stopped
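
After the switch, containers reach the host's Ollama through the `host.docker.internal` alias that `extra_hosts: host-gateway` provides. A quick smoke test from inside llm-pool or the worker (the model tag and prompt are arbitrary assumptions; `litellm.completion` with an `ollama/` model prefix and `api_base` is standard litellm usage):

```python
# Sketch: verify a container can reach the host Ollama via litellm.
import os

from litellm import completion  # installed from GitHub per the llm-pool pyproject

# extra_hosts maps host.docker.internal -> host-gateway, so this resolves
# to the Docker host, where Ollama listens on 11434.
base_url = os.environ.get("OLLAMA_BASE_URL", "http://host.docker.internal:11434")

resp = completion(
    model="ollama/llama3",  # any model tag already pulled on the host
    messages=[{"role": "user", "content": "ping"}],
    api_base=base_url,
)
print(resp.choices[0].message.content)
```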