Add comprehensive database integration, authentication, and admin dashboard

This commit introduces major enhancements to Talk2Me:

## Database Integration
- PostgreSQL support with SQLAlchemy ORM
- Redis integration for caching and real-time analytics (setup sketched below)
- Automated database initialization scripts
- Migration support infrastructure
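
A minimal sketch of how the two stores could be wired together, assuming standard SQLAlchemy and redis-py clients; the `DATABASE_URL`/`REDIS_URL` names are illustrative defaults, not Talk2Me's actual config keys:

```python
import os

import redis
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# PostgreSQL via SQLAlchemy: one engine per process, one session per request
engine = create_engine(os.environ.get("DATABASE_URL", "postgresql://localhost/talk2me"))
SessionLocal = sessionmaker(bind=engine)

# Redis for caching and real-time analytics counters
redis_client = redis.Redis.from_url(os.environ.get("REDIS_URL", "redis://localhost:6379/0"))
```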

## User Authentication System
- JWT-based API authentication
- Session-based web authentication
- API key authentication for programmatic access (see the sketch after this list)
- User roles and permissions (admin/user)
- Login history and session tracking
- Rate limiting per user with customizable limits
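
A hedged sketch of the dual authentication check (JWT bearer token or API key). The header names, the HS256 choice, and the `find_user_by_api_key` helper are assumptions for illustration, not the app's actual API:

```python
import jwt  # PyJWT


def authenticate(request, jwt_secret, find_user_by_api_key):
    """Return a user id from a JWT or an API key, or None if neither checks out."""
    auth = request.headers.get("Authorization", "")
    if auth.startswith("Bearer "):
        try:
            claims = jwt.decode(auth[len("Bearer "):], jwt_secret, algorithms=["HS256"])
            return claims["sub"]  # user id carried in the token
        except jwt.InvalidTokenError:
            return None
    api_key = request.headers.get("X-API-Key")
    if api_key:
        user = find_user_by_api_key(api_key)
        if user is not None and user.is_active:
            return user.id
    return None  # caller responds with 401
```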

## Admin Dashboard
- Real-time analytics and monitoring
- User management interface (create, edit, delete users)
- System health monitoring
- Request/error tracking
- Language pair usage statistics
- Performance metrics visualization

## Key Features
- Dual authentication support (token + user accounts)
- Graceful fallback for missing services
- Non-blocking analytics middleware (sketched after this list)
- Comprehensive error handling
- Session management with security features
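
One way to keep analytics off the request path is a bounded queue drained by a daemon thread; this is a sketch under that assumption, not necessarily how the middleware is implemented here:

```python
import queue
import threading

_metrics: "queue.Queue[dict]" = queue.Queue(maxsize=10_000)


def record(event: dict) -> None:
    """Enqueue a metric without ever blocking the request."""
    try:
        _metrics.put_nowait(event)
    except queue.Full:
        pass  # drop analytics rather than slow down users


def _drain(write_row) -> None:
    while True:
        write_row(_metrics.get())  # blocks only this worker thread


threading.Thread(target=_drain, args=(print,), daemon=True).start()
record({"endpoint": "/translate", "status": 200, "ms": 42})
```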

## Bug Fixes
- Fixed rate limiting bypass for admin routes
- Added missing email validation method
- Improved error handling for missing database tables
- Fixed session-based authentication for API endpoints

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Commit: fa951c3141 (parent d818ec7d73)
Date: 2025-06-03 18:21:56 -06:00
41 changed files with 10120 additions and 325 deletions


@@ -0,0 +1,216 @@
"""Add user authentication tables and update existing models
This migration:
1. Creates user authentication tables (users, login_history, user_sessions, revoked_tokens)
2. Updates translation and transcription tables to link to users
3. Adds proper foreign key constraints and indexes
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import uuid

# revision identifiers
revision = 'add_user_authentication'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # Create users table
    op.create_table('users',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False, default=uuid.uuid4),
        sa.Column('email', sa.String(255), nullable=False),
        sa.Column('username', sa.String(100), nullable=False),
        sa.Column('password_hash', sa.String(255), nullable=False),
        sa.Column('full_name', sa.String(255), nullable=True),
        sa.Column('avatar_url', sa.String(500), nullable=True),
        sa.Column('api_key', sa.String(64), nullable=False),
        sa.Column('api_key_created_at', sa.DateTime(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
        sa.Column('is_verified', sa.Boolean(), nullable=False, default=False),
        sa.Column('is_suspended', sa.Boolean(), nullable=False, default=False),
        sa.Column('suspension_reason', sa.Text(), nullable=True),
        sa.Column('suspended_at', sa.DateTime(), nullable=True),
        sa.Column('suspended_until', sa.DateTime(), nullable=True),
        sa.Column('role', sa.String(20), nullable=False, default='user'),
        sa.Column('permissions', postgresql.JSONB(astext_type=sa.Text()), nullable=False, default=[]),
        sa.Column('rate_limit_per_minute', sa.Integer(), nullable=False, default=30),
        sa.Column('rate_limit_per_hour', sa.Integer(), nullable=False, default=500),
        sa.Column('rate_limit_per_day', sa.Integer(), nullable=False, default=5000),
        sa.Column('total_requests', sa.Integer(), nullable=False, default=0),
        sa.Column('total_translations', sa.Integer(), nullable=False, default=0),
        sa.Column('total_transcriptions', sa.Integer(), nullable=False, default=0),
        sa.Column('total_tts_requests', sa.Integer(), nullable=False, default=0),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('last_login_at', sa.DateTime(), nullable=True),
        sa.Column('last_active_at', sa.DateTime(), nullable=True),
        sa.Column('password_changed_at', sa.DateTime(), nullable=False),
        sa.Column('failed_login_attempts', sa.Integer(), nullable=False, default=0),
        sa.Column('locked_until', sa.DateTime(), nullable=True),
        sa.Column('settings', postgresql.JSONB(astext_type=sa.Text()), nullable=False, default={}),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username'),
        sa.UniqueConstraint('api_key')
    )

    # Create indexes on users table
    op.create_index('idx_users_email', 'users', ['email'])
    op.create_index('idx_users_username', 'users', ['username'])
    op.create_index('idx_users_api_key', 'users', ['api_key'])
    op.create_index('idx_users_email_active', 'users', ['email', 'is_active'])
    op.create_index('idx_users_role_active', 'users', ['role', 'is_active'])
    op.create_index('idx_users_created_at', 'users', ['created_at'])

    # Create login_history table
    op.create_table('login_history',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False, default=uuid.uuid4),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('login_at', sa.DateTime(), nullable=False),
        sa.Column('logout_at', sa.DateTime(), nullable=True),
        sa.Column('login_method', sa.String(20), nullable=False),
        sa.Column('success', sa.Boolean(), nullable=False),
        sa.Column('failure_reason', sa.String(255), nullable=True),
        sa.Column('session_id', sa.String(255), nullable=True),
        sa.Column('jwt_jti', sa.String(255), nullable=True),
        sa.Column('ip_address', sa.String(45), nullable=False),
        sa.Column('user_agent', sa.String(500), nullable=True),
        sa.Column('device_info', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('country', sa.String(2), nullable=True),
        sa.Column('city', sa.String(100), nullable=True),
        sa.Column('is_suspicious', sa.Boolean(), nullable=False, default=False),
        sa.Column('security_notes', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes on login_history
    op.create_index('idx_login_history_user_id', 'login_history', ['user_id'])
    op.create_index('idx_login_history_user_time', 'login_history', ['user_id', 'login_at'])
    op.create_index('idx_login_history_session', 'login_history', ['session_id'])
    op.create_index('idx_login_history_jwt_jti', 'login_history', ['jwt_jti'])
    op.create_index('idx_login_history_ip', 'login_history', ['ip_address'])

    # Create user_sessions table
    op.create_table('user_sessions',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False, default=uuid.uuid4),
        sa.Column('session_id', sa.String(255), nullable=False),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('access_token_jti', sa.String(255), nullable=True),
        sa.Column('refresh_token_jti', sa.String(255), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('last_active_at', sa.DateTime(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('ip_address', sa.String(45), nullable=False),
        sa.Column('user_agent', sa.String(500), nullable=True),
        sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=False, default={}),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('session_id')
    )

    # Create indexes on user_sessions
    op.create_index('idx_user_sessions_session_id', 'user_sessions', ['session_id'])
    op.create_index('idx_user_sessions_user_id', 'user_sessions', ['user_id'])
    op.create_index('idx_user_sessions_user_active', 'user_sessions', ['user_id', 'expires_at'])
    op.create_index('idx_user_sessions_token', 'user_sessions', ['access_token_jti'])

    # Create revoked_tokens table
    op.create_table('revoked_tokens',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False, default=uuid.uuid4),
        sa.Column('jti', sa.String(255), nullable=False),
        sa.Column('token_type', sa.String(20), nullable=False),
        sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('revoked_at', sa.DateTime(), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('reason', sa.String(255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('jti')
    )

    # Create indexes on revoked_tokens
    op.create_index('idx_revoked_tokens_jti', 'revoked_tokens', ['jti'])
    op.create_index('idx_revoked_tokens_user_id', 'revoked_tokens', ['user_id'])
    op.create_index('idx_revoked_tokens_expires', 'revoked_tokens', ['expires_at'])
    # Update translations table to add user_id with a proper foreign key.
    # There is no cheap existence check here, so tolerate failures on re-runs
    # rather than swallowing every exception with a bare except.
    try:
        op.add_column('translations', sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=True))
        op.create_foreign_key('fk_translations_user_id', 'translations', 'users', ['user_id'], ['id'], ondelete='SET NULL')
        op.create_index('idx_translations_user_id', 'translations', ['user_id'])
    except Exception:
        pass  # Column might already exist

    # Update transcriptions table to add user_id with a proper foreign key
    try:
        op.add_column('transcriptions', sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=True))
        op.create_foreign_key('fk_transcriptions_user_id', 'transcriptions', 'users', ['user_id'], ['id'], ondelete='SET NULL')
        op.create_index('idx_transcriptions_user_id', 'transcriptions', ['user_id'])
    except Exception:
        pass  # Column might already exist

    # Add a proper foreign key to user_preferences if it does not exist yet
    try:
        op.create_foreign_key('fk_user_preferences_user_id', 'user_preferences', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    except Exception:
        pass  # Foreign key might already exist

    # Add a proper foreign key to api_keys if it does not exist yet
    try:
        op.add_column('api_keys', sa.Column('user_id_new', postgresql.UUID(as_uuid=True), nullable=True))
        op.create_foreign_key('fk_api_keys_user_id', 'api_keys', 'users', ['user_id_new'], ['id'], ondelete='CASCADE')
    except Exception:
        pass  # Column/FK might already exist
    # Create function for updating the updated_at timestamp
    op.execute("""
        CREATE OR REPLACE FUNCTION update_updated_at_column()
        RETURNS TRIGGER AS $$
        BEGIN
            NEW.updated_at = NOW();
            RETURN NEW;
        END;
        $$ LANGUAGE plpgsql;
    """)

    # Create trigger for users table
    op.execute("""
        CREATE TRIGGER update_users_updated_at
            BEFORE UPDATE ON users
            FOR EACH ROW
            EXECUTE FUNCTION update_updated_at_column();
    """)


def downgrade():
    # Drop triggers
    op.execute("DROP TRIGGER IF EXISTS update_users_updated_at ON users")
    op.execute("DROP FUNCTION IF EXISTS update_updated_at_column()")

    # Drop foreign keys individually so one missing constraint does not
    # prevent the others from being removed
    for constraint, table in [
        ('fk_translations_user_id', 'translations'),
        ('fk_transcriptions_user_id', 'transcriptions'),
        ('fk_user_preferences_user_id', 'user_preferences'),
        ('fk_api_keys_user_id', 'api_keys'),
    ]:
        try:
            op.drop_constraint(constraint, table, type_='foreignkey')
        except Exception:
            pass  # Constraint might not exist

    # Drop columns individually for the same reason
    for table, column in [
        ('translations', 'user_id'),
        ('transcriptions', 'user_id'),
        ('api_keys', 'user_id_new'),
    ]:
        try:
            op.drop_column(table, column)
        except Exception:
            pass  # Column might not exist

    # Drop tables
    op.drop_table('revoked_tokens')
    op.drop_table('user_sessions')
    op.drop_table('login_history')
    op.drop_table('users')
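
Assuming a standard Alembic setup with an `alembic.ini` at the project root, the migration above could be applied or reverted programmatically, equivalent to running `alembic upgrade head` on the command line:

```python
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")        # config path is an assumption
command.upgrade(cfg, "head")       # apply the migration
# command.downgrade(cfg, "-1")     # revert just this revision
```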


@@ -0,0 +1,135 @@
-- Create analytics tables for Talk2Me admin dashboard
-- Error logs table
CREATE TABLE IF NOT EXISTS error_logs (
    id SERIAL PRIMARY KEY,
    error_type VARCHAR(100) NOT NULL,
    error_message TEXT,
    endpoint VARCHAR(255),
    method VARCHAR(10),
    status_code INTEGER,
    ip_address INET,
    user_agent TEXT,
    request_id VARCHAR(100),
    stack_trace TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for error_logs
CREATE INDEX IF NOT EXISTS idx_error_logs_created_at ON error_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_error_logs_error_type ON error_logs(error_type);
CREATE INDEX IF NOT EXISTS idx_error_logs_endpoint ON error_logs(endpoint);

-- Request logs table for detailed analytics
CREATE TABLE IF NOT EXISTS request_logs (
    id SERIAL PRIMARY KEY,
    endpoint VARCHAR(255) NOT NULL,
    method VARCHAR(10) NOT NULL,
    status_code INTEGER,
    response_time_ms INTEGER,
    ip_address INET,
    user_agent TEXT,
    request_size_bytes INTEGER,
    response_size_bytes INTEGER,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for request_logs
CREATE INDEX IF NOT EXISTS idx_request_logs_created_at ON request_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_request_logs_endpoint ON request_logs(endpoint);
CREATE INDEX IF NOT EXISTS idx_request_logs_session_id ON request_logs(session_id);
CREATE INDEX IF NOT EXISTS idx_request_logs_response_time ON request_logs(response_time_ms);

-- Translation logs table
CREATE TABLE IF NOT EXISTS translation_logs (
    id SERIAL PRIMARY KEY,
    source_language VARCHAR(10),
    target_language VARCHAR(10),
    text_length INTEGER,
    response_time_ms INTEGER,
    success BOOLEAN DEFAULT TRUE,
    error_message TEXT,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for translation_logs
CREATE INDEX IF NOT EXISTS idx_translation_logs_created_at ON translation_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_translation_logs_languages ON translation_logs(source_language, target_language);

-- Transcription logs table
CREATE TABLE IF NOT EXISTS transcription_logs (
    id SERIAL PRIMARY KEY,
    detected_language VARCHAR(10),
    audio_duration_seconds FLOAT,
    file_size_bytes INTEGER,
    response_time_ms INTEGER,
    success BOOLEAN DEFAULT TRUE,
    error_message TEXT,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for transcription_logs
CREATE INDEX IF NOT EXISTS idx_transcription_logs_created_at ON transcription_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_transcription_logs_language ON transcription_logs(detected_language);

-- TTS logs table
CREATE TABLE IF NOT EXISTS tts_logs (
    id SERIAL PRIMARY KEY,
    language VARCHAR(10),
    text_length INTEGER,
    voice VARCHAR(50),
    response_time_ms INTEGER,
    success BOOLEAN DEFAULT TRUE,
    error_message TEXT,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create indexes for tts_logs
CREATE INDEX IF NOT EXISTS idx_tts_logs_created_at ON tts_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_tts_logs_language ON tts_logs(language);

-- Daily aggregated stats table for faster queries
CREATE TABLE IF NOT EXISTS daily_stats (
    date DATE PRIMARY KEY,
    total_requests INTEGER DEFAULT 0,
    total_translations INTEGER DEFAULT 0,
    total_transcriptions INTEGER DEFAULT 0,
    total_tts INTEGER DEFAULT 0,
    total_errors INTEGER DEFAULT 0,
    unique_sessions INTEGER DEFAULT 0,
    avg_response_time_ms FLOAT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Create function to update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Create trigger for daily_stats
DROP TRIGGER IF EXISTS update_daily_stats_updated_at ON daily_stats;
CREATE TRIGGER update_daily_stats_updated_at
    BEFORE UPDATE ON daily_stats
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();

-- Create view for language pair statistics
CREATE OR REPLACE VIEW language_pair_stats AS
SELECT
    source_language || ' -> ' || target_language AS language_pair,
    COUNT(*) AS usage_count,
    AVG(response_time_ms) AS avg_response_time,
    MAX(created_at) AS last_used
FROM translation_logs
WHERE success = TRUE
GROUP BY source_language, target_language
ORDER BY usage_count DESC;
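
The language pair statistics shown in the admin dashboard could come straight from this view; a small read-only sketch, with a placeholder DSN:

```python
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://localhost/talk2me")  # placeholder DSN

with engine.connect() as conn:
    rows = conn.execute(text(
        "SELECT language_pair, usage_count, avg_response_time "
        "FROM language_pair_stats LIMIT 10"
    ))
    for pair, count, avg_ms in rows:
        print(f"{pair}: {count} uses, {avg_ms:.0f} ms avg")
```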