This commit introduces major enhancements to Talk2Me:

## Database Integration
- PostgreSQL support with SQLAlchemy ORM
- Redis integration for caching and real-time analytics
- Automated database initialization scripts
- Migration support infrastructure

## User Authentication System
- JWT-based API authentication
- Session-based web authentication
- API key authentication for programmatic access
- User roles and permissions (admin/user)
- Login history and session tracking
- Rate limiting per user with customizable limits

## Admin Dashboard
- Real-time analytics and monitoring
- User management interface (create, edit, delete users)
- System health monitoring
- Request/error tracking
- Language pair usage statistics
- Performance metrics visualization

## Key Features
- Dual authentication support (token + user accounts)
- Graceful fallback for missing services
- Non-blocking analytics middleware
- Comprehensive error handling
- Session management with security features

## Bug Fixes
- Fixed rate limiting bypass for admin routes
- Added missing email validation method
- Improved error handling for missing database tables
- Fixed session-based authentication for API endpoints

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
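The commit message lists the authentication features but not the wiring. Below is a minimal sketch of how the dual token/session check could look, assuming a Flask app with PyJWT; the decorator name, config key, and claim names are illustrative, not the actual Talk2Me code from this commit.

```python
# Illustrative only -- not the actual Talk2Me implementation. Assumes a Flask
# app with PyJWT; decorator name, config key, and claim names are hypothetical.
from functools import wraps

import jwt
from flask import current_app, g, jsonify, request, session


def require_auth(view):
    """Allow a request through if it carries a valid JWT or an active web session."""
    @wraps(view)
    def wrapper(*args, **kwargs):
        # 1. Token-based API clients: Authorization: Bearer <jwt>
        auth_header = request.headers.get("Authorization", "")
        if auth_header.startswith("Bearer "):
            token = auth_header.split(" ", 1)[1]
            try:
                claims = jwt.decode(
                    token,
                    current_app.config["JWT_SECRET_KEY"],
                    algorithms=["HS256"],
                )
            except jwt.InvalidTokenError:
                return jsonify({"error": "invalid or expired token"}), 401
            g.user_id = claims["sub"]
            return view(*args, **kwargs)

        # 2. Browser clients: user id stored in the server-side session at login
        if session.get("user_id"):
            g.user_id = session["user_id"]
            return view(*args, **kwargs)

        return jsonify({"error": "authentication required"}), 401

    return wrapper
```

The API key access mentioned in the commit message could be a third branch that checks a header such as `X-API-Key` against stored keys before falling through to the 401.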
72 lines · 2.1 KiB · Python · Executable File
#!/usr/bin/env python3
"""Initialize analytics database tables"""

import os
import sys
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import logging
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def init_analytics_db():
    """Initialize analytics database tables"""

    # Get database URL from environment
    database_url = os.environ.get('DATABASE_URL', 'postgresql://localhost/talk2me')

    try:
        # Connect to PostgreSQL
        conn = psycopg2.connect(database_url)
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = conn.cursor()

        logger.info("Connected to PostgreSQL database")

        # Read SQL file
        sql_file = os.path.join(os.path.dirname(__file__), 'migrations', 'create_analytics_tables.sql')

        if not os.path.exists(sql_file):
            logger.error(f"SQL file not found: {sql_file}")
            return False

        with open(sql_file, 'r') as f:
            sql_content = f.read()

        # Execute SQL commands
        logger.info("Creating analytics tables...")
        cursor.execute(sql_content)

        logger.info("Analytics tables created successfully!")

        # Verify tables were created
        cursor.execute("""
            SELECT table_name
            FROM information_schema.tables
            WHERE table_schema = 'public'
            AND table_name IN (
                'error_logs', 'request_logs', 'translation_logs',
                'transcription_logs', 'tts_logs', 'daily_stats'
            )
        """)

        created_tables = [row[0] for row in cursor.fetchall()]
        logger.info(f"Created tables: {', '.join(created_tables)}")

        cursor.close()
        conn.close()

        return True

    except Exception as e:
        logger.error(f"Failed to initialize analytics database: {e}")
        return False


if __name__ == "__main__":
    success = init_analytics_db()
    sys.exit(0 if success else 1)
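The script above only creates the tables; the commit message also claims graceful fallback and improved handling of missing database tables. A minimal sketch of what such a startup check could look like, reusing the same table list and psycopg2 style as the script (the helper name and its use are hypothetical, not code from this commit):

```python
# Hypothetical startup check sketching the "graceful fallback" idea from the
# commit message; not part of this commit. Reuses the table list from the
# script above.
import logging
import os

import psycopg2

logger = logging.getLogger(__name__)

REQUIRED_TABLES = {
    'error_logs', 'request_logs', 'translation_logs',
    'transcription_logs', 'tts_logs', 'daily_stats',
}


def analytics_available():
    """Return True only if all analytics tables exist; never raise."""
    database_url = os.environ.get('DATABASE_URL', 'postgresql://localhost/talk2me')
    try:
        conn = psycopg2.connect(database_url)
        cursor = conn.cursor()
        cursor.execute(
            "SELECT table_name FROM information_schema.tables "
            "WHERE table_schema = 'public'"
        )
        existing = {row[0] for row in cursor.fetchall()}
        cursor.close()
        conn.close()
        return REQUIRED_TABLES <= existing
    except Exception as e:
        # Database down or unreachable: log it and run without analytics
        logger.warning(f"Analytics disabled, database unavailable: {e}")
        return False
```

In a typical deployment the init script is run once, with DATABASE_URL pointing at the target database, before the app starts; a check like this then lets the app come up and simply skip analytics if that step was missed.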