talk2me/migrations/create_analytics_tables.sql
Adolfo Delorenzo fa951c3141 Add comprehensive database integration, authentication, and admin dashboard
This commit introduces major enhancements to Talk2Me:

## Database Integration
- PostgreSQL support with SQLAlchemy ORM
- Redis integration for caching and real-time analytics
- Automated database initialization scripts
- Migration support infrastructure

## User Authentication System
- JWT-based API authentication
- Session-based web authentication
- API key authentication for programmatic access
- User roles and permissions (admin/user)
- Login history and session tracking
- Rate limiting per user with customizable limits

## Admin Dashboard
- Real-time analytics and monitoring
- User management interface (create, edit, delete users)
- System health monitoring
- Request/error tracking
- Language pair usage statistics
- Performance metrics visualization

## Key Features
- Dual authentication support (token + user accounts)
- Graceful fallback for missing services
- Non-blocking analytics middleware
- Comprehensive error handling
- Session management with security features

## Bug Fixes
- Fixed rate limiting bypass for admin routes
- Added missing email validation method
- Improved error handling for missing database tables
- Fixed session-based authentication for API endpoints

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-06-03 18:21:56 -06:00

-- Create analytics tables for Talk2Me admin dashboard
-- Error logs table
CREATE TABLE IF NOT EXISTS error_logs (
    id SERIAL PRIMARY KEY,
    error_type VARCHAR(100) NOT NULL,
    error_message TEXT,
    endpoint VARCHAR(255),
    method VARCHAR(10),
    status_code INTEGER,
    ip_address INET,
    user_agent TEXT,
    request_id VARCHAR(100),
    stack_trace TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes for error_logs
CREATE INDEX IF NOT EXISTS idx_error_logs_created_at ON error_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_error_logs_error_type ON error_logs(error_type);
CREATE INDEX IF NOT EXISTS idx_error_logs_endpoint ON error_logs(endpoint);
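-- Illustrative usage (commented out, not executed by this migration): the kind
-- of query the admin dashboard's error tracking is assumed to run against the
-- indexes above, summarizing recent error volume by type.
--
--   SELECT error_type,
--          COUNT(*)        AS occurrences,
--          MAX(created_at) AS last_seen
--     FROM error_logs
--    WHERE created_at > CURRENT_TIMESTAMP - INTERVAL '24 hours'
--    GROUP BY error_type
--    ORDER BY occurrences DESC;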
-- Request logs table for detailed analytics
CREATE TABLE IF NOT EXISTS request_logs (
    id SERIAL PRIMARY KEY,
    endpoint VARCHAR(255) NOT NULL,
    method VARCHAR(10) NOT NULL,
    status_code INTEGER,
    response_time_ms INTEGER,
    ip_address INET,
    user_agent TEXT,
    request_size_bytes INTEGER,
    response_size_bytes INTEGER,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes for request_logs
CREATE INDEX IF NOT EXISTS idx_request_logs_created_at ON request_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_request_logs_endpoint ON request_logs(endpoint);
CREATE INDEX IF NOT EXISTS idx_request_logs_session_id ON request_logs(session_id);
CREATE INDEX IF NOT EXISTS idx_request_logs_response_time ON request_logs(response_time_ms);
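-- Illustrative usage (commented out): a per-endpoint latency summary of the
-- sort the dashboard's performance metrics could be built on; the query and
-- the 24-hour window are assumptions, not part of the application code. The
-- idx_request_logs_response_time index above supports this kind of scan.
--
--   SELECT endpoint,
--          COUNT(*)              AS requests,
--          AVG(response_time_ms) AS avg_ms,
--          percentile_cont(0.95) WITHIN GROUP (ORDER BY response_time_ms) AS p95_ms
--     FROM request_logs
--    WHERE created_at > CURRENT_TIMESTAMP - INTERVAL '24 hours'
--    GROUP BY endpoint
--    ORDER BY requests DESC;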
-- Translation logs table
CREATE TABLE IF NOT EXISTS translation_logs (
    id SERIAL PRIMARY KEY,
    source_language VARCHAR(10),
    target_language VARCHAR(10),
    text_length INTEGER,
    response_time_ms INTEGER,
    success BOOLEAN DEFAULT TRUE,
    error_message TEXT,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes for translation_logs
CREATE INDEX IF NOT EXISTS idx_translation_logs_created_at ON translation_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_translation_logs_languages ON translation_logs(source_language, target_language);
-- Transcription logs table
CREATE TABLE IF NOT EXISTS transcription_logs (
    id SERIAL PRIMARY KEY,
    detected_language VARCHAR(10),
    audio_duration_seconds FLOAT,
    file_size_bytes INTEGER,
    response_time_ms INTEGER,
    success BOOLEAN DEFAULT TRUE,
    error_message TEXT,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes for transcription_logs
CREATE INDEX IF NOT EXISTS idx_transcription_logs_created_at ON transcription_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_transcription_logs_language ON transcription_logs(detected_language);
-- TTS logs table
CREATE TABLE IF NOT EXISTS tts_logs (
    id SERIAL PRIMARY KEY,
    language VARCHAR(10),
    text_length INTEGER,
    voice VARCHAR(50),
    response_time_ms INTEGER,
    success BOOLEAN DEFAULT TRUE,
    error_message TEXT,
    session_id VARCHAR(100),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create indexes for tts_logs
CREATE INDEX IF NOT EXISTS idx_tts_logs_created_at ON tts_logs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_tts_logs_language ON tts_logs(language);
-- Daily aggregated stats table for faster queries
CREATE TABLE IF NOT EXISTS daily_stats (
    date DATE PRIMARY KEY,
    total_requests INTEGER DEFAULT 0,
    total_translations INTEGER DEFAULT 0,
    total_transcriptions INTEGER DEFAULT 0,
    total_tts INTEGER DEFAULT 0,
    total_errors INTEGER DEFAULT 0,
    unique_sessions INTEGER DEFAULT 0,
    avg_response_time_ms FLOAT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);
-- Create function to update updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Create trigger for daily_stats
DROP TRIGGER IF EXISTS update_daily_stats_updated_at ON daily_stats;
CREATE TRIGGER update_daily_stats_updated_at
    BEFORE UPDATE ON daily_stats
    FOR EACH ROW
    EXECUTE FUNCTION update_updated_at_column();
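-- Illustrative usage (commented out): daily_stats is assumed to be maintained
-- by an aggregation job that upserts one row per day; the conflict path takes
-- the UPDATE branch, so the trigger above refreshes updated_at automatically.
-- The column mapping below is a sketch, not the application's actual job.
--
--   INSERT INTO daily_stats (date, total_requests, total_errors, unique_sessions, avg_response_time_ms)
--   SELECT created_at::date,
--          COUNT(*),
--          COUNT(*) FILTER (WHERE status_code >= 500),
--          COUNT(DISTINCT session_id),
--          AVG(response_time_ms)
--     FROM request_logs
--    WHERE created_at::date = CURRENT_DATE
--    GROUP BY created_at::date
--   ON CONFLICT (date) DO UPDATE SET
--       total_requests       = EXCLUDED.total_requests,
--       total_errors         = EXCLUDED.total_errors,
--       unique_sessions      = EXCLUDED.unique_sessions,
--       avg_response_time_ms = EXCLUDED.avg_response_time_ms;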
-- Create view for language pair statistics
CREATE OR REPLACE VIEW language_pair_stats AS
SELECT
    source_language || ' -> ' || target_language AS language_pair,
    COUNT(*) AS usage_count,
    AVG(response_time_ms) AS avg_response_time,
    MAX(created_at) AS last_used
FROM translation_logs
WHERE success = TRUE
GROUP BY source_language, target_language
ORDER BY usage_count DESC;
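-- Illustrative usage (commented out): the dashboard's language pair statistics
-- can read straight from the view; since the view is already ordered by
-- usage_count, a LIMIT yields the most-used pairs.
--
--   SELECT language_pair, usage_count, avg_response_time, last_used
--     FROM language_pair_stats
--    LIMIT 10;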