Mirror of https://github.com/aljazceru/enclava.git (synced 2025-12-18 07:54:29 +01:00)

Commit: cleanup
.gitignore (vendored)
@@ -1,3 +1,4 @@
*.backup
backend/storage/rag_documents/*
# Python
__pycache__/
@@ -1,251 +0,0 @@
"""Initial schema

Revision ID: 001
Revises:
Create Date: 2025-01-01 00:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create users table
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('username', sa.String(), nullable=False),
        sa.Column('hashed_password', sa.String(), nullable=False),
        sa.Column('full_name', sa.String(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_superuser', sa.Boolean(), nullable=True),
        sa.Column('is_verified', sa.Boolean(), nullable=True),
        sa.Column('role', sa.String(), nullable=True),
        sa.Column('permissions', sa.JSON(), nullable=True),
        sa.Column('avatar_url', sa.String(), nullable=True),
        sa.Column('bio', sa.Text(), nullable=True),
        sa.Column('company', sa.String(), nullable=True),
        sa.Column('website', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('last_login', sa.DateTime(), nullable=True),
        sa.Column('preferences', sa.JSON(), nullable=True),
        sa.Column('notification_settings', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)

    # Create api_keys table
    op.create_table('api_keys',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('key_hash', sa.String(), nullable=False),
        sa.Column('key_prefix', sa.String(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('permissions', sa.JSON(), nullable=True),
        sa.Column('scopes', sa.JSON(), nullable=True),
        sa.Column('rate_limit_per_minute', sa.Integer(), nullable=True),
        sa.Column('rate_limit_per_hour', sa.Integer(), nullable=True),
        sa.Column('rate_limit_per_day', sa.Integer(), nullable=True),
        sa.Column('allowed_models', sa.JSON(), nullable=True),
        sa.Column('allowed_endpoints', sa.JSON(), nullable=True),
        sa.Column('allowed_ips', sa.JSON(), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('last_used_at', sa.DateTime(), nullable=True),
        sa.Column('expires_at', sa.DateTime(), nullable=True),
        sa.Column('total_requests', sa.Integer(), nullable=True),
        sa.Column('total_tokens', sa.Integer(), nullable=True),
        sa.Column('total_cost', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_api_keys_id'), 'api_keys', ['id'], unique=False)
    op.create_index(op.f('ix_api_keys_key_hash'), 'api_keys', ['key_hash'], unique=True)
    op.create_index(op.f('ix_api_keys_key_prefix'), 'api_keys', ['key_prefix'], unique=False)

    # Create budgets table
    op.create_table('budgets',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('limit_amount', sa.Float(), nullable=False),
        sa.Column('currency', sa.String(), nullable=True),
        sa.Column('period', sa.String(), nullable=True),
        sa.Column('current_usage', sa.Float(), nullable=True),
        sa.Column('remaining_amount', sa.Float(), nullable=True),
        sa.Column('status', sa.String(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('alert_thresholds', sa.JSON(), nullable=True),
        sa.Column('alerts_sent', sa.JSON(), nullable=True),
        sa.Column('auto_suspend_on_exceed', sa.Boolean(), nullable=True),
        sa.Column('auto_notify_on_exceed', sa.Boolean(), nullable=True),
        sa.Column('period_start', sa.DateTime(), nullable=False),
        sa.Column('period_end', sa.DateTime(), nullable=False),
        sa.Column('allowed_models', sa.JSON(), nullable=True),
        sa.Column('allowed_endpoints', sa.JSON(), nullable=True),
        sa.Column('user_groups', sa.JSON(), nullable=True),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('last_reset_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_budgets_id'), 'budgets', ['id'], unique=False)

    # Create usage_tracking table
    op.create_table('usage_tracking',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('request_id', sa.String(), nullable=False),
        sa.Column('session_id', sa.String(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('api_key_id', sa.Integer(), nullable=True),
        sa.Column('endpoint', sa.String(), nullable=False),
        sa.Column('method', sa.String(), nullable=False),
        sa.Column('user_agent', sa.String(), nullable=True),
        sa.Column('ip_address', sa.String(), nullable=True),
        sa.Column('model_name', sa.String(), nullable=True),
        sa.Column('provider', sa.String(), nullable=True),
        sa.Column('model_version', sa.String(), nullable=True),
        sa.Column('request_data', sa.JSON(), nullable=True),
        sa.Column('response_data', sa.JSON(), nullable=True),
        sa.Column('prompt_tokens', sa.Integer(), nullable=True),
        sa.Column('completion_tokens', sa.Integer(), nullable=True),
        sa.Column('total_tokens', sa.Integer(), nullable=True),
        sa.Column('cost_per_token', sa.Float(), nullable=True),
        sa.Column('total_cost', sa.Float(), nullable=True),
        sa.Column('currency', sa.String(), nullable=True),
        sa.Column('response_time', sa.Float(), nullable=True),
        sa.Column('queue_time', sa.Float(), nullable=True),
        sa.Column('processing_time', sa.Float(), nullable=True),
        sa.Column('status', sa.String(), nullable=True),
        sa.Column('status_code', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_type', sa.String(), nullable=True),
        sa.Column('modules_used', sa.JSON(), nullable=True),
        sa.Column('interceptor_chain', sa.JSON(), nullable=True),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('cache_hit', sa.Boolean(), nullable=True),
        sa.Column('cache_key', sa.String(), nullable=True),
        sa.Column('rate_limit_remaining', sa.Integer(), nullable=True),
        sa.Column('rate_limit_reset', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['api_key_id'], ['api_keys.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_usage_tracking_id'), 'usage_tracking', ['id'], unique=False)
    op.create_index(op.f('ix_usage_tracking_request_id'), 'usage_tracking', ['request_id'], unique=True)
    op.create_index(op.f('ix_usage_tracking_session_id'), 'usage_tracking', ['session_id'], unique=False)

    # Create audit_logs table
    op.create_table('audit_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('action', sa.String(), nullable=False),
        sa.Column('resource_type', sa.String(), nullable=False),
        sa.Column('resource_id', sa.String(), nullable=True),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('details', sa.JSON(), nullable=True),
        sa.Column('ip_address', sa.String(), nullable=True),
        sa.Column('user_agent', sa.String(), nullable=True),
        sa.Column('session_id', sa.String(), nullable=True),
        sa.Column('request_id', sa.String(), nullable=True),
        sa.Column('severity', sa.String(), nullable=True),
        sa.Column('category', sa.String(), nullable=True),
        sa.Column('success', sa.Boolean(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('old_values', sa.JSON(), nullable=True),
        sa.Column('new_values', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
    op.create_index(op.f('ix_audit_logs_id'), 'audit_logs', ['id'], unique=False)

    # Create modules table
    op.create_table('modules',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('display_name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('module_type', sa.String(), nullable=True),
        sa.Column('category', sa.String(), nullable=True),
        sa.Column('version', sa.String(), nullable=False),
        sa.Column('author', sa.String(), nullable=True),
        sa.Column('license', sa.String(), nullable=True),
        sa.Column('status', sa.String(), nullable=True),
        sa.Column('is_enabled', sa.Boolean(), nullable=True),
        sa.Column('is_core', sa.Boolean(), nullable=True),
        sa.Column('config_schema', sa.JSON(), nullable=True),
        sa.Column('config_values', sa.JSON(), nullable=True),
        sa.Column('default_config', sa.JSON(), nullable=True),
        sa.Column('dependencies', sa.JSON(), nullable=True),
        sa.Column('conflicts', sa.JSON(), nullable=True),
        sa.Column('install_path', sa.String(), nullable=True),
        sa.Column('entry_point', sa.String(), nullable=True),
        sa.Column('interceptor_chains', sa.JSON(), nullable=True),
        sa.Column('execution_order', sa.Integer(), nullable=True),
        sa.Column('api_endpoints', sa.JSON(), nullable=True),
        sa.Column('required_permissions', sa.JSON(), nullable=True),
        sa.Column('security_level', sa.String(), nullable=True),
        sa.Column('tags', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('last_error', sa.Text(), nullable=True),
        sa.Column('error_count', sa.Integer(), nullable=True),
        sa.Column('last_started', sa.DateTime(), nullable=True),
        sa.Column('last_stopped', sa.DateTime(), nullable=True),
        sa.Column('request_count', sa.Integer(), nullable=True),
        sa.Column('success_count', sa.Integer(), nullable=True),
        sa.Column('error_count_runtime', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('installed_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_modules_id'), 'modules', ['id'], unique=False)
    op.create_index(op.f('ix_modules_name'), 'modules', ['name'], unique=True)


def downgrade() -> None:
    op.drop_index(op.f('ix_modules_name'), table_name='modules')
    op.drop_index(op.f('ix_modules_id'), table_name='modules')
    op.drop_table('modules')
    op.drop_index(op.f('ix_audit_logs_id'), table_name='audit_logs')
    op.drop_index(op.f('ix_audit_logs_created_at'), table_name='audit_logs')
    op.drop_table('audit_logs')
    op.drop_index(op.f('ix_usage_tracking_session_id'), table_name='usage_tracking')
    op.drop_index(op.f('ix_usage_tracking_request_id'), table_name='usage_tracking')
    op.drop_index(op.f('ix_usage_tracking_id'), table_name='usage_tracking')
    op.drop_table('usage_tracking')
    op.drop_index(op.f('ix_budgets_id'), table_name='budgets')
    op.drop_table('budgets')
    op.drop_index(op.f('ix_api_keys_key_prefix'), table_name='api_keys')
    op.drop_index(op.f('ix_api_keys_key_hash'), table_name='api_keys')
    op.drop_index(op.f('ix_api_keys_id'), table_name='api_keys')
    op.drop_table('api_keys')
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.drop_index(op.f('ix_users_id'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
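For orientation, the users table above is the hub that the later tables hang off: api_keys, budgets, usage_tracking, and audit_logs all carry a user_id foreign key back to it. A minimal SQLAlchemy model mirroring a subset of that table might look like the sketch below; the declarative Base and the trimmed column list are assumptions, while the column names and types follow the migration.

    from datetime import datetime
    from sqlalchemy import Boolean, Column, DateTime, Integer, JSON, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()  # assumption: the real project defines its own Base

    class User(Base):
        __tablename__ = "users"

        # Only a representative subset of the migration's columns is shown
        id = Column(Integer, primary_key=True, index=True)
        email = Column(String, unique=True, index=True, nullable=False)
        username = Column(String, unique=True, index=True, nullable=False)
        hashed_password = Column(String, nullable=False)
        is_active = Column(Boolean)
        permissions = Column(JSON)
        created_at = Column(DateTime, default=datetime.utcnow)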
@@ -1,84 +0,0 @@
"""Add RAG collections and documents tables

Revision ID: 002
Revises: 001
Create Date: 2025-07-23 19:30:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create rag_collections table
    op.create_table('rag_collections',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('qdrant_collection_name', sa.String(255), nullable=False),
        sa.Column('document_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('size_bytes', sa.BigInteger(), nullable=False, server_default='0'),
        sa.Column('vector_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('status', sa.String(50), nullable=False, server_default='active'),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_rag_collections_id'), 'rag_collections', ['id'], unique=False)
    op.create_index(op.f('ix_rag_collections_name'), 'rag_collections', ['name'], unique=False)
    op.create_index(op.f('ix_rag_collections_qdrant_collection_name'), 'rag_collections', ['qdrant_collection_name'], unique=True)

    # Create rag_documents table
    op.create_table('rag_documents',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('collection_id', sa.Integer(), nullable=False),
        sa.Column('filename', sa.String(255), nullable=False),
        sa.Column('original_filename', sa.String(255), nullable=False),
        sa.Column('file_path', sa.String(500), nullable=False),
        sa.Column('file_type', sa.String(50), nullable=False),
        sa.Column('file_size', sa.BigInteger(), nullable=False),
        sa.Column('mime_type', sa.String(100), nullable=True),
        sa.Column('status', sa.String(50), nullable=False, server_default='processing'),
        sa.Column('processing_error', sa.Text(), nullable=True),
        sa.Column('converted_content', sa.Text(), nullable=True),
        sa.Column('word_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('character_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('vector_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('chunk_size', sa.Integer(), nullable=False, server_default='1000'),
        sa.Column('document_metadata', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('indexed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default='false'),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['collection_id'], ['rag_collections.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_rag_documents_id'), 'rag_documents', ['id'], unique=False)
    op.create_index(op.f('ix_rag_documents_collection_id'), 'rag_documents', ['collection_id'], unique=False)
    op.create_index(op.f('ix_rag_documents_filename'), 'rag_documents', ['filename'], unique=False)
    op.create_index(op.f('ix_rag_documents_status'), 'rag_documents', ['status'], unique=False)
    op.create_index(op.f('ix_rag_documents_created_at'), 'rag_documents', ['created_at'], unique=False)


def downgrade() -> None:
    op.drop_index(op.f('ix_rag_documents_created_at'), table_name='rag_documents')
    op.drop_index(op.f('ix_rag_documents_status'), table_name='rag_documents')
    op.drop_index(op.f('ix_rag_documents_filename'), table_name='rag_documents')
    op.drop_index(op.f('ix_rag_documents_collection_id'), table_name='rag_documents')
    op.drop_index(op.f('ix_rag_documents_id'), table_name='rag_documents')
    op.drop_table('rag_documents')

    op.drop_index(op.f('ix_rag_collections_qdrant_collection_name'), table_name='rag_collections')
    op.drop_index(op.f('ix_rag_collections_name'), table_name='rag_collections')
    op.drop_index(op.f('ix_rag_collections_id'), table_name='rag_collections')
    op.drop_table('rag_collections')
@@ -1,82 +0,0 @@
"""Fix budget and usage_tracking columns

Revision ID: 003
Revises: 002
Create Date: 2025-07-24 09:30:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '003'
down_revision = '002'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Add missing columns to budgets table
    op.add_column('budgets', sa.Column('api_key_id', sa.Integer(), nullable=True))
    op.add_column('budgets', sa.Column('limit_cents', sa.Integer(), nullable=False, server_default='0'))
    op.add_column('budgets', sa.Column('warning_threshold_cents', sa.Integer(), nullable=True))
    op.add_column('budgets', sa.Column('period_type', sa.String(), nullable=False, server_default='monthly'))
    op.add_column('budgets', sa.Column('current_usage_cents', sa.Integer(), nullable=True, server_default='0'))
    op.add_column('budgets', sa.Column('is_exceeded', sa.Boolean(), nullable=True, server_default='false'))
    op.add_column('budgets', sa.Column('is_warning_sent', sa.Boolean(), nullable=True, server_default='false'))
    op.add_column('budgets', sa.Column('enforce_hard_limit', sa.Boolean(), nullable=True, server_default='true'))
    op.add_column('budgets', sa.Column('enforce_warning', sa.Boolean(), nullable=True, server_default='true'))
    op.add_column('budgets', sa.Column('auto_renew', sa.Boolean(), nullable=True, server_default='true'))
    op.add_column('budgets', sa.Column('rollover_unused', sa.Boolean(), nullable=True, server_default='false'))
    op.add_column('budgets', sa.Column('notification_settings', sa.JSON(), nullable=True))

    # Create foreign key for api_key_id
    op.create_foreign_key('fk_budgets_api_key_id', 'budgets', 'api_keys', ['api_key_id'], ['id'])

    # Update usage_tracking table
    op.add_column('usage_tracking', sa.Column('budget_id', sa.Integer(), nullable=True))
    op.add_column('usage_tracking', sa.Column('model', sa.String(), nullable=True))
    op.add_column('usage_tracking', sa.Column('request_tokens', sa.Integer(), nullable=True))
    op.add_column('usage_tracking', sa.Column('response_tokens', sa.Integer(), nullable=True))
    op.add_column('usage_tracking', sa.Column('cost_cents', sa.Integer(), nullable=True))
    op.add_column('usage_tracking', sa.Column('cost_currency', sa.String(), nullable=True, server_default='USD'))
    op.add_column('usage_tracking', sa.Column('response_time_ms', sa.Integer(), nullable=True))
    op.add_column('usage_tracking', sa.Column('request_metadata', sa.JSON(), nullable=True))

    # Create foreign key for budget_id
    op.create_foreign_key('fk_usage_tracking_budget_id', 'usage_tracking', 'budgets', ['budget_id'], ['id'])

    # Update modules table
    op.add_column('modules', sa.Column('module_metadata', sa.JSON(), nullable=True))


def downgrade() -> None:
    # Remove added columns from modules
    op.drop_column('modules', 'module_metadata')

    # Remove added columns and constraints from usage_tracking
    op.drop_constraint('fk_usage_tracking_budget_id', 'usage_tracking', type_='foreignkey')
    op.drop_column('usage_tracking', 'request_metadata')
    op.drop_column('usage_tracking', 'response_time_ms')
    op.drop_column('usage_tracking', 'cost_currency')
    op.drop_column('usage_tracking', 'cost_cents')
    op.drop_column('usage_tracking', 'response_tokens')
    op.drop_column('usage_tracking', 'request_tokens')
    op.drop_column('usage_tracking', 'model')
    op.drop_column('usage_tracking', 'budget_id')

    # Remove added columns and constraints from budgets
    op.drop_constraint('fk_budgets_api_key_id', 'budgets', type_='foreignkey')
    op.drop_column('budgets', 'notification_settings')
    op.drop_column('budgets', 'rollover_unused')
    op.drop_column('budgets', 'auto_renew')
    op.drop_column('budgets', 'enforce_warning')
    op.drop_column('budgets', 'enforce_hard_limit')
    op.drop_column('budgets', 'is_warning_sent')
    op.drop_column('budgets', 'is_exceeded')
    op.drop_column('budgets', 'current_usage_cents')
    op.drop_column('budgets', 'period_type')
    op.drop_column('budgets', 'warning_threshold_cents')
    op.drop_column('budgets', 'limit_cents')
    op.drop_column('budgets', 'api_key_id')
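Revision 003 shifts money columns from floats (limit_amount, total_cost) to integer cents (limit_cents, cost_cents, current_usage_cents), the usual way to avoid float rounding drift when summing per-request costs. A small conversion helper, as a sketch (the function names are illustrative, not from this codebase):

    def dollars_to_cents(amount: float) -> int:
        # round() compensates for float artifacts such as 19.99 * 100 == 1998.999...
        return round(amount * 100)

    def cents_to_dollars(cents: int) -> float:
        return cents / 100

    assert dollars_to_cents(19.99) == 1999
    assert cents_to_dollars(1999) == 19.99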
@@ -1,34 +0,0 @@
"""Add budget fields to API keys

Revision ID: 004_add_api_key_budget_fields
Revises: 8bf097417ff0
Create Date: 2024-07-25 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '004_add_api_key_budget_fields'
down_revision = '8bf097417ff0'
branch_labels = None
depends_on = None


def upgrade():
    """Add budget-related fields to api_keys table"""
    # Add budget configuration columns
    op.add_column('api_keys', sa.Column('is_unlimited', sa.Boolean(), default=True, nullable=False))
    op.add_column('api_keys', sa.Column('budget_limit_cents', sa.Integer(), nullable=True))
    op.add_column('api_keys', sa.Column('budget_type', sa.String(), nullable=True))

    # Set default values for existing records
    op.execute("UPDATE api_keys SET is_unlimited = true WHERE is_unlimited IS NULL")


def downgrade():
    """Remove budget-related fields from api_keys table"""
    op.drop_column('api_keys', 'budget_type')
    op.drop_column('api_keys', 'budget_limit_cents')
    op.drop_column('api_keys', 'is_unlimited')
@@ -1,192 +0,0 @@
"""Add prompt templates for editable chatbot prompts

Revision ID: 005_add_prompt_templates
Revises: 004_add_api_key_budget_fields
Create Date: 2025-08-07 17:50:00.000000

"""
from alembic import op
import sqlalchemy as sa
from datetime import datetime
import uuid

# revision identifiers, used by Alembic.
revision = '005_add_prompt_templates'
down_revision = '004_add_api_key_budget_fields'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create prompt_templates table
    op.create_table(
        'prompt_templates',
        sa.Column('id', sa.String(), primary_key=True, index=True),
        sa.Column('name', sa.String(255), nullable=False, index=True),
        sa.Column('type_key', sa.String(100), nullable=False, unique=True, index=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('system_prompt', sa.Text(), nullable=False),
        sa.Column('is_default', sa.Boolean(), default=True, nullable=False),
        sa.Column('is_active', sa.Boolean(), default=True, nullable=False),
        sa.Column('version', sa.Integer(), default=1, nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.func.now(), onupdate=sa.func.now())
    )

    # Create prompt_variables table
    op.create_table(
        'prompt_variables',
        sa.Column('id', sa.String(), primary_key=True, index=True),
        sa.Column('variable_name', sa.String(100), nullable=False, unique=True, index=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('example_value', sa.String(500), nullable=True),
        sa.Column('is_active', sa.Boolean(), default=True, nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.func.now())
    )

    # Insert default prompt templates
    prompt_templates_table = sa.table(
        'prompt_templates',
        sa.column('id', sa.String),
        sa.column('name', sa.String),
        sa.column('type_key', sa.String),
        sa.column('description', sa.Text),
        sa.column('system_prompt', sa.Text),
        sa.column('is_default', sa.Boolean),
        sa.column('is_active', sa.Boolean),
        sa.column('version', sa.Integer),
        sa.column('created_at', sa.DateTime),
        sa.column('updated_at', sa.DateTime)
    )

    current_time = datetime.utcnow()

    default_prompts = [
        {
            'id': str(uuid.uuid4()),
            'name': 'General Assistant',
            'type_key': 'assistant',
            'description': 'Helpful AI assistant for general questions and tasks',
            'system_prompt': 'You are a helpful AI assistant. Provide accurate, concise, and friendly responses. Always aim to be helpful while being honest about your limitations. When you don\'t know something, say so clearly. Be professional but approachable in your communication style.',
            'is_default': True,
            'is_active': True,
            'version': 1,
            'created_at': current_time,
            'updated_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'name': 'Customer Support',
            'type_key': 'customer_support',
            'description': 'Professional customer service chatbot',
            'system_prompt': 'You are a professional customer support representative. Be empathetic, professional, and solution-focused in all interactions. Always try to understand the customer\'s issue fully before providing solutions. Use the knowledge base to provide accurate information. When you cannot resolve an issue, explain clearly how the customer can escalate or get further help. Maintain a helpful and patient tone even in difficult situations.',
            'is_default': True,
            'is_active': True,
            'version': 1,
            'created_at': current_time,
            'updated_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'name': 'Educational Tutor',
            'type_key': 'teacher',
            'description': 'Educational tutor and learning assistant',
            'system_prompt': 'You are an experienced educational tutor and learning facilitator. Break down complex concepts into understandable, digestible parts. Use analogies, examples, and step-by-step explanations to help students learn. Encourage critical thinking through thoughtful questions. Be patient, supportive, and encouraging. Adapt your teaching style to different learning preferences. When a student makes mistakes, guide them to the correct answer rather than just providing it.',
            'is_default': True,
            'is_active': True,
            'version': 1,
            'created_at': current_time,
            'updated_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'name': 'Research Assistant',
            'type_key': 'researcher',
            'description': 'Research assistant with fact-checking focus',
            'system_prompt': 'You are a thorough research assistant with a focus on accuracy and evidence-based information. Provide well-researched, factual information with sources when possible. Be thorough in your analysis and present multiple perspectives when relevant topics have different viewpoints. Always distinguish between established facts, current research, and opinions. When information is uncertain or contested, clearly communicate the level of confidence and supporting evidence.',
            'is_default': True,
            'is_active': True,
            'version': 1,
            'created_at': current_time,
            'updated_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'name': 'Creative Writing Assistant',
            'type_key': 'creative_writer',
            'description': 'Creative writing and storytelling assistant',
            'system_prompt': 'You are an experienced creative writing mentor and storytelling expert. Help with brainstorming ideas, character development, plot structure, dialogue, and creative expression. Be imaginative and inspiring while providing constructive, actionable feedback. Encourage experimentation with different writing styles and techniques. When reviewing work, balance praise for strengths with specific suggestions for improvement. Help writers find their unique voice while mastering fundamental storytelling principles.',
            'is_default': True,
            'is_active': True,
            'version': 1,
            'created_at': current_time,
            'updated_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'name': 'Custom Chatbot',
            'type_key': 'custom',
            'description': 'Fully customizable chatbot with user-defined personality',
            'system_prompt': 'You are a helpful AI assistant. Your personality, expertise, and behavior will be defined by the user through custom instructions. Follow the user\'s guidance on how to respond, what tone to use, and what role to play. Be adaptable and responsive to the specific needs and preferences outlined in your configuration.',
            'is_default': True,
            'is_active': True,
            'version': 1,
            'created_at': current_time,
            'updated_at': current_time
        }
    ]

    op.bulk_insert(prompt_templates_table, default_prompts)

    # Insert default prompt variables
    variables_table = sa.table(
        'prompt_variables',
        sa.column('id', sa.String),
        sa.column('variable_name', sa.String),
        sa.column('description', sa.Text),
        sa.column('example_value', sa.String),
        sa.column('is_active', sa.Boolean),
        sa.column('created_at', sa.DateTime)
    )

    default_variables = [
        {
            'id': str(uuid.uuid4()),
            'variable_name': '{user_name}',
            'description': 'The name of the user chatting with the bot',
            'example_value': 'John Smith',
            'is_active': True,
            'created_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'variable_name': '{context}',
            'description': 'Additional context from RAG or previous conversation',
            'example_value': 'Based on the uploaded documents...',
            'is_active': True,
            'created_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'variable_name': '{company_name}',
            'description': 'Your company or organization name',
            'example_value': 'Acme Corporation',
            'is_active': True,
            'created_at': current_time
        },
        {
            'id': str(uuid.uuid4()),
            'variable_name': '{current_date}',
            'description': 'Current date and time',
            'example_value': '2025-08-07 17:50:00',
            'is_active': True,
            'created_at': current_time
        }
    ]

    op.bulk_insert(variables_table, default_variables)


def downgrade() -> None:
    op.drop_table('prompt_variables')
    op.drop_table('prompt_templates')
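The seeded prompt_variables rows define placeholders such as {user_name}, {context}, {company_name}, and {current_date}. How the chatbot layer actually substitutes them is not part of this commit; a plausible minimal renderer would be:

    from datetime import datetime

    def render_system_prompt(template: str, **values: str) -> str:
        # Replace known {placeholders}; unknown ones are left intact rather than
        # raising, as str.format would on a stray brace in the prompt text.
        for name, value in values.items():
            template = template.replace("{" + name + "}", value)
        return template

    rendered = render_system_prompt(
        "You are a support agent for {company_name}. Today is {current_date}.",
        company_name="Acme Corporation",
        current_date=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    )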
@@ -1,33 +0,0 @@
"""Add chatbot API key support

Revision ID: 009_add_chatbot_api_key_support
Revises: 004_add_api_key_budget_fields
Create Date: 2025-01-08 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers
revision = '009_add_chatbot_api_key_support'
down_revision = '004_add_api_key_budget_fields'
branch_labels = None
depends_on = None


def upgrade():
    """Add allowed_chatbots column to api_keys table"""
    # Add the allowed_chatbots column
    op.add_column('api_keys', sa.Column('allowed_chatbots', sa.JSON(), nullable=True))

    # Update existing records to have empty allowed_chatbots list
    op.execute("UPDATE api_keys SET allowed_chatbots = '[]' WHERE allowed_chatbots IS NULL")

    # Make the column non-nullable with a default
    op.alter_column('api_keys', 'allowed_chatbots', nullable=False, server_default='[]')


def downgrade():
    """Remove allowed_chatbots column from api_keys table"""
    op.drop_column('api_keys', 'allowed_chatbots')
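Revision 009 follows the standard three-step recipe for introducing a NOT NULL column on a populated table: add it nullable, backfill existing rows, then tighten the constraint with a server default. The same recipe in generic form, as a sketch (the table, column, and fill value here are placeholders):

    from alembic import op
    import sqlalchemy as sa

    def add_required_json_column(table: str, column: str, fill: str = '[]') -> None:
        """Sketch: add nullable, backfill, then enforce NOT NULL with a default."""
        op.add_column(table, sa.Column(column, sa.JSON(), nullable=True))
        op.execute(f"UPDATE {table} SET {column} = '{fill}' WHERE {column} IS NULL")
        op.alter_column(table, column, nullable=False, server_default=fill)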
@@ -1,81 +0,0 @@
"""add workflow tables only

Revision ID: 010_add_workflow_tables_only
Revises: f7af0923d38b
Create Date: 2025-08-18 09:03:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '010_add_workflow_tables_only'
down_revision = 'f7af0923d38b'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create workflow_definitions table
    op.create_table('workflow_definitions',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('version', sa.String(length=50), nullable=True),
        sa.Column('steps', sa.JSON(), nullable=False),
        sa.Column('variables', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('timeout', sa.Integer(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('created_by', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create workflow_executions table
    op.create_table('workflow_executions',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('workflow_id', sa.String(), nullable=False),
        sa.Column('status', sa.Enum('PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED', name='workflowstatus'), nullable=True),
        sa.Column('current_step', sa.String(), nullable=True),
        sa.Column('input_data', sa.JSON(), nullable=True),
        sa.Column('context', sa.JSON(), nullable=True),
        sa.Column('results', sa.JSON(), nullable=True),
        sa.Column('error', sa.Text(), nullable=True),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('executed_by', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['workflow_id'], ['workflow_definitions.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # Create workflow_step_logs table
    op.create_table('workflow_step_logs',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('execution_id', sa.String(), nullable=False),
        sa.Column('step_id', sa.String(), nullable=False),
        sa.Column('step_name', sa.String(length=255), nullable=False),
        sa.Column('step_type', sa.String(length=50), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.Column('input_data', sa.JSON(), nullable=True),
        sa.Column('output_data', sa.JSON(), nullable=True),
        sa.Column('error', sa.Text(), nullable=True),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('duration_ms', sa.Integer(), nullable=True),
        sa.Column('retry_count', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['execution_id'], ['workflow_executions.id'], ),
        sa.PrimaryKeyConstraint('id')
    )


def downgrade() -> None:
    op.drop_table('workflow_step_logs')
    op.drop_table('workflow_executions')
    op.drop_table('workflow_definitions')
    op.execute('DROP TYPE IF EXISTS workflowstatus')
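One detail worth noting in the downgrade above: in PostgreSQL, dropping workflow_executions does not drop the workflowstatus enum type that sa.Enum created, hence the explicit DROP TYPE. An equivalent way to manage the type's lifecycle explicitly, as a sketch using SQLAlchemy's PostgreSQL dialect:

    from alembic import op
    from sqlalchemy.dialects import postgresql

    workflow_status = postgresql.ENUM(
        'PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED',
        name='workflowstatus',
        create_type=False,  # create/drop explicitly instead of on first table use
    )

    def create_status_type() -> None:
        workflow_status.create(op.get_bind(), checkfirst=True)

    def drop_status_type() -> None:
        workflow_status.drop(op.get_bind(), checkfirst=True)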
@@ -1,79 +0,0 @@
"""Add chatbot tables

Revision ID: 8bf097417ff0
Revises: 003
Create Date: 2025-07-25 03:58:39.403887

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '8bf097417ff0'
down_revision = '003'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('chatbot_instances',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('config', sa.JSON(), nullable=False),
        sa.Column('created_by', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('chatbot_conversations',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('chatbot_id', sa.String(), nullable=False),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('context_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['chatbot_id'], ['chatbot_instances.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('chatbot_analytics',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('chatbot_id', sa.String(), nullable=False),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('event_type', sa.String(length=50), nullable=False),
        sa.Column('event_data', sa.JSON(), nullable=True),
        sa.Column('response_time_ms', sa.Integer(), nullable=True),
        sa.Column('token_count', sa.Integer(), nullable=True),
        sa.Column('cost_cents', sa.Integer(), nullable=True),
        sa.Column('model_used', sa.String(length=100), nullable=True),
        sa.Column('rag_used', sa.Boolean(), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['chatbot_id'], ['chatbot_instances.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('chatbot_messages',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('conversation_id', sa.String(), nullable=False),
        sa.Column('role', sa.String(length=20), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('message_metadata', sa.JSON(), nullable=True),
        sa.Column('sources', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['conversation_id'], ['chatbot_conversations.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('chatbot_messages')
    op.drop_table('chatbot_analytics')
    op.drop_table('chatbot_conversations')
    op.drop_table('chatbot_instances')
    # ### end Alembic commands ###
@@ -1,120 +0,0 @@
"""add_zammad_integration_tables

Revision ID: 9645f764a517
Revises: 010_add_workflow_tables_only
Create Date: 2025-08-19 19:55:18.895986

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


# revision identifiers, used by Alembic.
revision = '9645f764a517'
down_revision = '010_add_workflow_tables_only'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create zammad_tickets table
    op.create_table(
        'zammad_tickets',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('zammad_ticket_id', sa.Integer(), nullable=False),
        sa.Column('ticket_number', sa.String(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('state', sa.String(), nullable=False),
        sa.Column('priority', sa.String(), nullable=True),
        sa.Column('customer_email', sa.String(), nullable=True),
        sa.Column('processing_status', sa.String(), nullable=False),
        sa.Column('processed_at', sa.DateTime(), nullable=True),
        sa.Column('processed_by_user_id', sa.Integer(), nullable=True),
        sa.Column('chatbot_id', sa.String(), nullable=True),
        sa.Column('summary', sa.Text(), nullable=True),
        sa.Column('context_data', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('zammad_created_at', sa.DateTime(), nullable=True),
        sa.Column('zammad_updated_at', sa.DateTime(), nullable=True),
        sa.Column('zammad_article_count', sa.Integer(), nullable=False),
        sa.Column('config_snapshot', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['processed_by_user_id'], ['users.id'], ),
    )

    # Create indexes for zammad_tickets
    op.create_index('idx_zammad_tickets_status_created', 'zammad_tickets', ['processing_status', 'created_at'])
    op.create_index('idx_zammad_tickets_state_processed', 'zammad_tickets', ['state', 'processed_at'])
    op.create_index('idx_zammad_tickets_user_status', 'zammad_tickets', ['processed_by_user_id', 'processing_status'])
    op.create_index(op.f('ix_zammad_tickets_id'), 'zammad_tickets', ['id'])
    op.create_index(op.f('ix_zammad_tickets_ticket_number'), 'zammad_tickets', ['ticket_number'])
    op.create_index(op.f('ix_zammad_tickets_zammad_ticket_id'), 'zammad_tickets', ['zammad_ticket_id'], unique=True)

    # Create zammad_processing_logs table
    op.create_table(
        'zammad_processing_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('batch_id', sa.String(), nullable=False),
        sa.Column('started_at', sa.DateTime(), nullable=False),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('initiated_by_user_id', sa.Integer(), nullable=True),
        sa.Column('config_used', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('filters_applied', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('tickets_found', sa.Integer(), nullable=False),
        sa.Column('tickets_processed', sa.Integer(), nullable=False),
        sa.Column('tickets_failed', sa.Integer(), nullable=False),
        sa.Column('tickets_skipped', sa.Integer(), nullable=False),
        sa.Column('processing_time_seconds', sa.Integer(), nullable=True),
        sa.Column('average_time_per_ticket', sa.Integer(), nullable=True),
        sa.Column('errors_encountered', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('status', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['initiated_by_user_id'], ['users.id'], ),
    )

    # Create indexes for zammad_processing_logs
    op.create_index('idx_processing_logs_batch_status', 'zammad_processing_logs', ['batch_id', 'status'])
    op.create_index('idx_processing_logs_user_started', 'zammad_processing_logs', ['initiated_by_user_id', 'started_at'])
    op.create_index(op.f('ix_zammad_processing_logs_batch_id'), 'zammad_processing_logs', ['batch_id'])
    op.create_index(op.f('ix_zammad_processing_logs_id'), 'zammad_processing_logs', ['id'])

    # Create zammad_configurations table
    op.create_table(
        'zammad_configurations',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('is_default', sa.Boolean(), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('zammad_url', sa.String(), nullable=False),
        sa.Column('api_token_encrypted', sa.String(), nullable=False),
        sa.Column('chatbot_id', sa.String(), nullable=False),
        sa.Column('process_state', sa.String(), nullable=False),
        sa.Column('max_tickets', sa.Integer(), nullable=False),
        sa.Column('skip_existing', sa.Boolean(), nullable=False),
        sa.Column('auto_process', sa.Boolean(), nullable=False),
        sa.Column('process_interval', sa.Integer(), nullable=False),
        sa.Column('summary_template', sa.Text(), nullable=True),
        sa.Column('custom_settings', postgresql.JSON(astext_type=sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('last_used_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    )

    # Create indexes for zammad_configurations
    op.create_index('idx_zammad_config_user_active', 'zammad_configurations', ['user_id', 'is_active'])
    op.create_index('idx_zammad_config_user_default', 'zammad_configurations', ['user_id', 'is_default'])
    op.create_index(op.f('ix_zammad_configurations_id'), 'zammad_configurations', ['id'])


def downgrade() -> None:
    # Drop tables in reverse order
    op.drop_table('zammad_configurations')
    op.drop_table('zammad_processing_logs')
    op.drop_table('zammad_tickets')
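The composite indexes above pair a filter column with a sort companion, for example processing_status with created_at, which matches the obvious work-queue query over tickets. A sketch of a query shape that idx_zammad_tickets_status_created would serve, using the ZammadTicket model that the (also removed) API module imports from modules.zammad.models; the "pending" status value is an assumption:

    from sqlalchemy import select
    from modules.zammad.models import ZammadTicket

    stmt = (
        select(ZammadTicket)
        .where(ZammadTicket.processing_status == "pending")  # assumed status value
        .order_by(ZammadTicket.created_at.desc())
        .limit(10)
    )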
@@ -1,24 +0,0 @@
"""merge prompt templates and chatbot api key support

Revision ID: f7af0923d38b
Revises: 005_add_prompt_templates, 009_add_chatbot_api_key_support
Create Date: 2025-08-18 06:51:17.515233

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'f7af0923d38b'
down_revision = ('005_add_prompt_templates', '009_add_chatbot_api_key_support')
branch_labels = None
depends_on = None


def upgrade() -> None:
    pass


def downgrade() -> None:
    pass
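This empty revision is an Alembic merge point: the tuple down_revision joins the two heads that diverged when 005 and 009 were both based on 004, so upgrade() and downgrade() are intentionally no-ops. Alembic can generate such a file itself; roughly:

    alembic heads    # shows the two divergent heads
    alembic merge -m "merge prompt templates and chatbot api key support" \
        005_add_prompt_templates 009_add_chatbot_api_key_support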
@@ -18,7 +18,7 @@ from .rag import router as rag_router
from .chatbot import router as chatbot_router
from .prompt_templates import router as prompt_templates_router
from .security import router as security_router
from .zammad import router as zammad_router
from .plugin_registry import router as plugin_registry_router

# Create main API router
api_router = APIRouter()
@@ -68,5 +68,6 @@ api_router.include_router(prompt_templates_router, prefix="/prompt-templates", t
# Include security routes
api_router.include_router(security_router, prefix="/security", tags=["security"])

# Include Zammad integration routes
api_router.include_router(zammad_router, prefix="/zammad", tags=["zammad"])

# Include plugin registry routes
api_router.include_router(plugin_registry_router, prefix="/plugins", tags=["plugins"])
@@ -42,6 +42,7 @@ class ChatRequest(BaseModel):

@router.get("/list")
@router.get("/instances")
async def list_chatbots(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
@@ -601,7 +601,7 @@ async def create_embedding(

 @router.get("/health")
 async def llm_health_check(
-    context: Dict[str, Any] = Depends(require_api_key)
+    context: Dict[str, Any] = Depends(get_auth_context)
 ):
     """Health check for LLM service"""
     try:
@@ -3,6 +3,7 @@ User management API endpoints
 """

 from typing import List, Optional
+from datetime import datetime
 from fastapi import APIRouter, Depends, HTTPException, status, Query
 from pydantic import BaseModel, EmailStr
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -43,7 +44,7 @@ class UserUpdate(BaseModel):


 class UserResponse(BaseModel):
-    id: str
+    id: int
     username: str
     email: str
     full_name: Optional[str] = None
@@ -51,9 +52,9 @@ class UserResponse(BaseModel):
     is_active: bool
     is_verified: bool
     is_superuser: bool
-    created_at: str
-    updated_at: Optional[str] = None
-    last_login: Optional[str] = None
+    created_at: datetime
+    updated_at: Optional[datetime] = None
+    last_login: Optional[datetime] = None

     class Config:
         from_attributes = True
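Typing created_at and friends as datetime works cleanly here because from_attributes = True lets Pydantic read the ORM object's attributes directly, and datetime fields serialize to ISO 8601 strings in the JSON response. A minimal sketch of the pattern, assuming Pydantic v2 (where model_config is the idiomatic spelling of the Config class above; the UserOut name is illustrative):

    from datetime import datetime
    from typing import Optional
    from pydantic import BaseModel, ConfigDict

    class UserOut(BaseModel):
        model_config = ConfigDict(from_attributes=True)

        id: int
        username: str
        created_at: datetime
        last_login: Optional[datetime] = None

    # user = UserOut.model_validate(db_user)  # reads attributes off the ORM object
    # user.model_dump_json()  # '{"id": 1, ..., "created_at": "2025-01-01T00:00:00"}'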
@@ -1,670 +0,0 @@
|
||||
"""
|
||||
Zammad Integration API endpoints
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import Dict, Any, List, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
|
||||
from pydantic import BaseModel, validator
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_, or_, desc
|
||||
from datetime import datetime
|
||||
|
||||
from app.db.database import get_db
|
||||
from app.core.logging import log_api_request
|
||||
from app.services.module_manager import module_manager
|
||||
from app.core.security import get_current_user
|
||||
from app.models.user import User
|
||||
from app.services.api_key_auth import get_api_key_auth
|
||||
from app.models.api_key import APIKey
|
||||
from app.models.chatbot import ChatbotInstance
|
||||
|
||||
# Import Zammad models
|
||||
from modules.zammad.models import (
|
||||
ZammadTicket,
|
||||
ZammadProcessingLog,
|
||||
ZammadConfiguration,
|
||||
ProcessingStatus
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
class ZammadConfigurationRequest(BaseModel):
|
||||
"""Request model for creating/updating Zammad configuration"""
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
is_default: bool = False
|
||||
zammad_url: str
|
||||
api_token: str
|
||||
chatbot_id: str
|
||||
process_state: str = "open"
|
||||
max_tickets: int = 10
|
||||
skip_existing: bool = True
|
||||
auto_process: bool = False
|
||||
process_interval: int = 30
|
||||
summary_template: Optional[str] = None
|
||||
custom_settings: Optional[Dict[str, Any]] = {}
|
||||
|
||||
@validator('zammad_url')
|
||||
def validate_zammad_url(cls, v):
|
||||
if not v.startswith(('http://', 'https://')):
|
||||
raise ValueError('Zammad URL must start with http:// or https://')
|
||||
return v.rstrip('/')
|
||||
|
||||
@validator('max_tickets')
|
||||
def validate_max_tickets(cls, v):
|
||||
if not 1 <= v <= 100:
|
||||
raise ValueError('max_tickets must be between 1 and 100')
|
||||
return v
|
||||
|
||||
@validator('process_interval')
|
||||
def validate_process_interval(cls, v):
|
||||
if not 5 <= v <= 1440:
|
||||
raise ValueError('process_interval must be between 5 and 1440 minutes')
|
||||
return v
|
||||
|
||||
|
||||
class ProcessTicketsRequest(BaseModel):
|
||||
"""Request model for processing tickets"""
|
||||
config_id: Optional[int] = None
|
||||
filters: Dict[str, Any] = {}
|
||||
|
||||
@validator('filters', pre=True)
|
||||
def validate_filters(cls, v):
|
||||
"""Ensure filters is always a dict"""
|
||||
if v is None:
|
||||
return {}
|
||||
if isinstance(v, list):
|
||||
# If someone passes a list, convert to empty dict
|
||||
return {}
|
||||
if not isinstance(v, dict):
|
||||
# If it's some other type, convert to empty dict
|
||||
return {}
|
||||
return v
|
||||
|
||||
|
||||
class ProcessSingleTicketRequest(BaseModel):
|
||||
"""Request model for processing a single ticket"""
|
||||
ticket_id: int
|
||||
config_id: Optional[int] = None
|
||||
|
||||
|
||||
class TestConnectionRequest(BaseModel):
|
||||
"""Request model for testing Zammad connection"""
|
||||
zammad_url: str
|
||||
api_token: str
|
||||
|
||||
|
||||
@router.get("/configurations")
|
||||
async def get_configurations(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get all Zammad configurations for the current user"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Get configurations from database
|
||||
stmt = (
|
||||
select(ZammadConfiguration)
|
||||
.where(ZammadConfiguration.user_id == user_id)
|
||||
.where(ZammadConfiguration.is_active == True)
|
||||
.order_by(ZammadConfiguration.is_default.desc(), ZammadConfiguration.created_at.desc())
|
||||
)
|
||||
result = await db.execute(stmt)
|
||||
configurations = [config.to_dict() for config in result.scalars()]
|
||||
|
||||
return {
|
||||
"configurations": configurations,
|
||||
"count": len(configurations)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error fetching configurations: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/configurations")
|
||||
async def create_configuration(
|
||||
config_request: ZammadConfigurationRequest,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Create a new Zammad configuration"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Verify chatbot exists and user has access
|
||||
chatbot_stmt = select(ChatbotInstance).where(
|
||||
and_(
|
||||
ChatbotInstance.id == config_request.chatbot_id,
|
||||
ChatbotInstance.created_by == str(user_id),
|
||||
ChatbotInstance.is_active == True
|
||||
)
|
||||
)
|
||||
chatbot = await db.scalar(chatbot_stmt)
|
||||
if not chatbot:
|
||||
raise HTTPException(status_code=404, detail="Chatbot not found or access denied")
|
||||
|
||||
# Use the module to handle configuration creation
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
request_data = {
|
||||
"action": "save_configuration",
|
||||
"configuration": config_request.dict()
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error creating configuration: {str(e)}")
|
||||
|
||||
|
||||
@router.put("/configurations/{config_id}")
|
||||
async def update_configuration(
|
||||
config_id: int,
|
||||
config_request: ZammadConfigurationRequest,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Update an existing Zammad configuration"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Check if configuration exists and belongs to user
|
||||
stmt = select(ZammadConfiguration).where(
|
||||
and_(
|
||||
ZammadConfiguration.id == config_id,
|
||||
ZammadConfiguration.user_id == user_id
|
||||
)
|
||||
)
|
||||
existing_config = await db.scalar(stmt)
|
||||
if not existing_config:
|
||||
raise HTTPException(status_code=404, detail="Configuration not found")
|
||||
|
||||
# Verify chatbot exists and user has access
|
||||
chatbot_stmt = select(ChatbotInstance).where(
|
||||
and_(
|
||||
ChatbotInstance.id == config_request.chatbot_id,
|
||||
ChatbotInstance.created_by == str(user_id),
|
||||
ChatbotInstance.is_active == True
|
||||
)
|
||||
)
|
||||
chatbot = await db.scalar(chatbot_stmt)
|
||||
if not chatbot:
|
||||
raise HTTPException(status_code=404, detail="Chatbot not found or access denied")
|
||||
|
||||
# Deactivate old configuration and create new one (for audit trail)
|
||||
existing_config.is_active = False
|
||||
|
||||
# Use the module to handle configuration creation
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
# For updates, pass the existing api_token if not provided in the request
|
||||
config_data = config_request.dict()
|
||||
if not config_data.get("api_token"):
|
||||
# Use existing encrypted token for the new config
|
||||
config_data["existing_encrypted_token"] = existing_config.api_token_encrypted
|
||||
|
||||
request_data = {
|
||||
"action": "save_configuration",
|
||||
"configuration": config_data
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
await db.commit()
|
||||
return result
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error updating configuration: {str(e)}")
|
||||
|
||||
|
||||
@router.delete("/configurations/{config_id}")
|
||||
async def delete_configuration(
|
||||
config_id: int,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Delete (deactivate) a Zammad configuration"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Check if configuration exists and belongs to user
|
||||
stmt = select(ZammadConfiguration).where(
|
||||
and_(
|
||||
ZammadConfiguration.id == config_id,
|
||||
ZammadConfiguration.user_id == user_id
|
||||
)
|
||||
)
|
||||
config = await db.scalar(stmt)
|
||||
if not config:
|
||||
raise HTTPException(status_code=404, detail="Configuration not found")
|
||||
|
||||
# Deactivate instead of deleting (for audit trail)
|
||||
config.is_active = False
|
||||
config.updated_at = datetime.utcnow()
|
||||
|
||||
await db.commit()
|
||||
|
||||
return {"message": "Configuration deleted successfully"}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error deleting configuration: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/test-connection")
|
||||
async def test_connection(
|
||||
test_request: TestConnectionRequest,
|
||||
current_user: User = Depends(get_current_user)
|
||||
):
|
||||
"""Test connection to a Zammad instance"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
request_data = {
|
||||
"action": "test_connection",
|
||||
"zammad_url": test_request.zammad_url,
|
||||
"api_token": test_request.api_token
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error testing connection: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/process")
|
||||
async def process_tickets(
|
||||
process_request: ProcessTicketsRequest,
|
||||
background_tasks: BackgroundTasks,
|
||||
current_user: User = Depends(get_current_user)
|
||||
):
|
||||
"""Process tickets for summarization"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
# Debug logging to identify the issue
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info(f"Process request filters type: {type(process_request.filters)}")
|
||||
logger.info(f"Process request filters value: {process_request.filters}")
|
||||
|
||||
# Ensure filters is a dict
|
||||
filters = process_request.filters if process_request.filters is not None else {}
|
||||
if not isinstance(filters, dict):
|
||||
logger.error(f"Filters is not a dict: {type(filters)} = {filters}")
|
||||
filters = {}
|
||||
|
||||
request_data = {
|
||||
"action": "process_tickets",
|
||||
"config_id": process_request.config_id,
|
||||
"filters": filters
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
# Execute processing in background for large batches
|
||||
if filters.get("limit", 10) > 5:
|
||||
# Start background task
|
||||
background_tasks.add_task(
|
||||
_process_tickets_background,
|
||||
zammad_module,
|
||||
request_data,
|
||||
context
|
||||
)
|
||||
|
||||
return {
|
||||
"message": "Processing started in background",
|
||||
"status": "started"
|
||||
}
|
||||
else:
|
||||
# Process immediately for small batches
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error starting ticket processing: {str(e)}")
|
||||
|
||||
|
||||
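

# Illustrative client-side sketch (not part of the original router code; the
# base URL, mount path, and bearer token are assumptions): batches requesting
# filters["limit"] > 5 return immediately with status "started" while the
# work continues as a background task.
async def _example_trigger_processing() -> None:
    import httpx

    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.post(
            "/api/v1/zammad/process",  # assumed mount point for this router
            json={"config_id": 1, "filters": {"state": "open", "limit": 20}},
            headers={"Authorization": "Bearer <session-token>"},
        )
        resp.raise_for_status()
        # limit 20 > 5, so the endpoint answers before processing finishes
        assert resp.json()["status"] == "started"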
@router.post("/tickets/{ticket_id}/process")
|
||||
async def process_single_ticket(
|
||||
ticket_id: int,
|
||||
process_request: ProcessSingleTicketRequest,
|
||||
current_user: User = Depends(get_current_user)
|
||||
):
|
||||
"""Process a single ticket for summarization"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
request_data = {
|
||||
"action": "process_single_ticket",
|
||||
"ticket_id": ticket_id,
|
||||
"config_id": process_request.config_id
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error processing ticket: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tickets/{ticket_id}/summary")
|
||||
async def get_ticket_summary(
|
||||
ticket_id: int,
|
||||
current_user: User = Depends(get_current_user)
|
||||
):
|
||||
"""Get the AI summary for a specific ticket"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
request_data = {
|
||||
"action": "get_ticket_summary",
|
||||
"ticket_id": ticket_id
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error getting ticket summary: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/tickets")
|
||||
async def get_processed_tickets(
|
||||
status: Optional[str] = None,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get list of processed tickets"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Build query
|
||||
query = select(ZammadTicket).where(ZammadTicket.processed_by_user_id == user_id)
|
||||
|
||||
if status:
|
||||
query = query.where(ZammadTicket.processing_status == status)
|
||||
|
||||
query = query.order_by(desc(ZammadTicket.processed_at))
|
||||
query = query.offset(offset).limit(limit)
|
||||
|
||||
# Execute query
|
||||
result = await db.execute(query)
|
||||
tickets = [ticket.to_dict() for ticket in result.scalars()]
|
||||
|
||||
# Get total count
|
||||
count_query = select(ZammadTicket).where(ZammadTicket.processed_by_user_id == user_id)
|
||||
if status:
|
||||
count_query = count_query.where(ZammadTicket.processing_status == status)
|
||||
|
||||
total_result = await db.execute(count_query)
|
||||
total_count = len(list(total_result.scalars()))
|
||||
|
||||
return {
|
||||
"tickets": tickets,
|
||||
"total": total_count,
|
||||
"limit": limit,
|
||||
"offset": offset
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error fetching tickets: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/status")
|
||||
async def get_module_status(
|
||||
current_user: User = Depends(get_current_user)
|
||||
):
|
||||
"""Get Zammad module status and statistics"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
request_data = {
|
||||
"action": "get_status"
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"user_permissions": current_user.get("permissions", [])
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error getting module status: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/processing-logs")
|
||||
async def get_processing_logs(
|
||||
limit: int = 10,
|
||||
offset: int = 0,
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get processing logs for the current user"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Get processing logs
|
||||
query = (
|
||||
select(ZammadProcessingLog)
|
||||
.where(ZammadProcessingLog.initiated_by_user_id == user_id)
|
||||
.order_by(desc(ZammadProcessingLog.started_at))
|
||||
.offset(offset)
|
||||
.limit(limit)
|
||||
)
|
||||
|
||||
result = await db.execute(query)
|
||||
logs = [log.to_dict() for log in result.scalars()]
|
||||
|
||||
# Get total count
|
||||
count_query = select(ZammadProcessingLog).where(
|
||||
ZammadProcessingLog.initiated_by_user_id == user_id
|
||||
)
|
||||
total_result = await db.execute(count_query)
|
||||
total_count = len(list(total_result.scalars()))
|
||||
|
||||
return {
|
||||
"logs": logs,
|
||||
"total": total_count,
|
||||
"limit": limit,
|
||||
"offset": offset
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error fetching processing logs: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/chatbots")
|
||||
async def get_available_chatbots(
|
||||
current_user: User = Depends(get_current_user),
|
||||
db: AsyncSession = Depends(get_db)
|
||||
):
|
||||
"""Get list of chatbots available for Zammad integration"""
|
||||
user_id = current_user.get("id") if isinstance(current_user, dict) else current_user.id
|
||||
|
||||
try:
|
||||
# Get user's active chatbots
|
||||
stmt = (
|
||||
select(ChatbotInstance)
|
||||
.where(ChatbotInstance.created_by == str(user_id))
|
||||
.where(ChatbotInstance.is_active == True)
|
||||
.order_by(ChatbotInstance.name)
|
||||
)
|
||||
|
||||
result = await db.execute(stmt)
|
||||
chatbots = []
|
||||
|
||||
for chatbot in result.scalars():
|
||||
# Extract chatbot_type from config JSON or provide default
|
||||
config = chatbot.config or {}
|
||||
chatbot_type = config.get('chatbot_type', 'general')
|
||||
model = config.get('model', 'Unknown')
|
||||
|
||||
chatbots.append({
|
||||
"id": chatbot.id,
|
||||
"name": chatbot.name,
|
||||
"chatbot_type": chatbot_type,
|
||||
"model": model,
|
||||
"description": chatbot.description or ''
|
||||
})
|
||||
|
||||
return {
|
||||
"chatbots": chatbots,
|
||||
"count": len(chatbots)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error fetching chatbots: {str(e)}")
|
||||
|
||||
|
||||


async def _process_tickets_background(zammad_module, request_data: Dict[str, Any], context: Dict[str, Any]):
    """Background task for processing tickets"""
    try:
        import logging
        logger = logging.getLogger(__name__)
        logger.info(f"Starting background ticket processing with request_data: {request_data}")
        logger.info(f"Context: {context}")
        await zammad_module.execute_with_interceptors(request_data, context)
    except Exception as e:
        # Log error but don't raise - this is a background task
        import logging
        import traceback
        logger = logging.getLogger(__name__)
        logger.error(f"Background ticket processing failed: {e}")
        logger.error(f"Full traceback: {traceback.format_exc()}")


# API key authentication endpoints (for programmatic access)
@router.post("/api-key/process", dependencies=[Depends(get_api_key_auth)])
|
||||
async def api_process_tickets(
|
||||
process_request: ProcessTicketsRequest,
|
||||
api_key_context: Dict = Depends(get_api_key_auth)
|
||||
):
|
||||
"""Process tickets using API key authentication"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = api_key_context["user_id"]
|
||||
|
||||
request_data = {
|
||||
"action": "process_tickets",
|
||||
"config_id": process_request.config_id,
|
||||
"filters": process_request.filters
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"api_key_id": api_key_context["api_key_id"],
|
||||
"user_permissions": ["modules:*"] # API keys get full module access
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error processing tickets: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/api-key/status", dependencies=[Depends(get_api_key_auth)])
|
||||
async def api_get_status(
|
||||
api_key_context: Dict = Depends(get_api_key_auth)
|
||||
):
|
||||
"""Get module status using API key authentication"""
|
||||
try:
|
||||
zammad_module = module_manager.get_module("zammad")
|
||||
if not zammad_module:
|
||||
raise HTTPException(status_code=503, detail="Zammad module not available")
|
||||
|
||||
user_id = api_key_context["user_id"]
|
||||
|
||||
request_data = {
|
||||
"action": "get_status"
|
||||
}
|
||||
|
||||
context = {
|
||||
"user_id": user_id,
|
||||
"api_key_id": api_key_context["api_key_id"],
|
||||
"user_permissions": ["modules:*"] # API keys get full module access
|
||||
}
|
||||
|
||||
result = await zammad_module.execute_with_interceptors(request_data, context)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error getting status: {str(e)}")
|
||||
@@ -49,6 +49,9 @@ class Settings(BaseSettings):
    # LLM Service Security
    LLM_ENCRYPTION_KEY: Optional[str] = None  # Key for encrypting LLM provider API keys

    # Plugin System Security
    PLUGIN_ENCRYPTION_KEY: Optional[str] = None  # Key for encrypting plugin secrets and configurations

    # API Keys for LLM providers
    OPENAI_API_KEY: Optional[str] = None
    ANTHROPIC_API_KEY: Optional[str] = None
@@ -110,6 +113,11 @@ class Settings(BaseSettings):
    # Module configuration
    MODULES_CONFIG_PATH: str = "config/modules.yaml"

    # Plugin configuration
    PLUGINS_DIR: str = "/plugins"
    PLUGINS_CONFIG_PATH: str = "config/plugins.yaml"
    PLUGIN_REPOSITORY_URL: str = "https://plugins.enclava.com"

    # Logging
    LOG_FORMAT: str = "json"
    LOG_LEVEL: str = "INFO"
@@ -71,6 +71,15 @@ async def lifespan(app: FastAPI):
    from app.services.audit_service import start_audit_worker
    start_audit_worker()

    # Initialize plugin auto-discovery service
    from app.services.plugin_autodiscovery import initialize_plugin_autodiscovery
    try:
        discovery_results = await initialize_plugin_autodiscovery()
        app.state.plugin_discovery_results = discovery_results
        logger.info(f"Plugin auto-discovery completed: {discovery_results['summary']}")
    except Exception as e:
        logger.warning(f"Plugin auto-discovery failed: {e}")

    logger.info("Platform started successfully")

    yield
@@ -93,14 +93,17 @@ class SecurityMiddleware(BaseHTTPMiddleware):
        """Determine if security analysis should be skipped for this request"""
        path = request.url.path

        # Skip for health checks and static assets
        # Skip for health checks, authentication endpoints, and static assets
        skip_paths = [
            "/health",
            "/metrics",
            "/api/v1/docs",
            "/api/v1/openapi.json",
            "/api/v1/redoc",
            "/favicon.ico"
            "/favicon.ico",
            "/api/v1/auth/register",
            "/api/v1/auth/login",
            "/",  # Root endpoint
        ]

        # Skip for static file extensions
@@ -12,6 +12,7 @@ from .rag_document import RagDocument
from .chatbot import ChatbotInstance, ChatbotConversation, ChatbotMessage, ChatbotAnalytics
from .prompt_template import PromptTemplate, ChatbotPromptVariable
from .workflow import WorkflowDefinition, WorkflowExecution, WorkflowStepLog
from .plugin import Plugin, PluginConfiguration, PluginInstance, PluginAuditLog, PluginCronJob, PluginAPIGateway

__all__ = [
    "User",
@@ -29,5 +30,11 @@ __all__ = [
    "ChatbotPromptVariable",
    "WorkflowDefinition",
    "WorkflowExecution",
    "WorkflowStepLog"
    "WorkflowStepLog",
    "Plugin",
    "PluginConfiguration",
    "PluginInstance",
    "PluginAuditLog",
    "PluginCronJob",
    "PluginAPIGateway"
]
@@ -65,6 +65,7 @@ class APIKey(Base):
    # Relationships
    usage_tracking = relationship("UsageTracking", back_populates="api_key", cascade="all, delete-orphan")
    budgets = relationship("Budget", back_populates="api_key", cascade="all, delete-orphan")
    plugin_audit_logs = relationship("PluginAuditLog", back_populates="api_key")

    def __repr__(self):
        return f"<APIKey(id={self.id}, name='{self.name}', user_id={self.user_id})>"
@@ -56,6 +56,7 @@ class User(Base):
    usage_tracking = relationship("UsageTracking", back_populates="user", cascade="all, delete-orphan")
    budgets = relationship("Budget", back_populates="user", cascade="all, delete-orphan")
    audit_logs = relationship("AuditLog", back_populates="user", cascade="all, delete-orphan")
    installed_plugins = relationship("Plugin", back_populates="installed_by_user")

    def __repr__(self):
        return f"<User(id={self.id}, email='{self.email}', username='{self.username}')>"
@@ -1,606 +0,0 @@
"""
API Proxy with comprehensive security interceptors
"""
import json
import time
import re
from typing import Dict, List, Any, Optional
from fastapi import Request, Response, HTTPException, status
from fastapi.responses import JSONResponse
import httpx
import yaml
from pathlib import Path

from app.core.config import settings
from app.core.logging import get_logger
from app.services.api_key_auth import get_api_key_info
from app.services.budget_enforcement import check_budget_and_record_usage
from app.middleware.rate_limiting import rate_limiter
from app.utils.exceptions import ValidationError, AuthenticationError, RateLimitExceeded
from app.services.audit_service import create_audit_log

logger = get_logger(__name__)


class SecurityConfiguration:
    """Security configuration for API proxy"""

    def __init__(self):
        self.config = self._load_security_config()

    def _load_security_config(self) -> Dict[str, Any]:
        """Load security configuration"""
        return {
            "rate_limits": {
                "global": 10000,  # per hour
                "per_key": 1000,  # per hour
                "per_endpoint": {
                    "/api/llm/v1/chat/completions": 100,  # per minute
                    "/api/modules/v1/rag/search": 500,  # per hour
                }
            },
            "max_request_size": 10 * 1024 * 1024,  # 10MB
            "max_string_length": 50000,
            "timeout": 30,  # seconds
            "required_headers": ["X-API-Key"],
            "ip_whitelist_enabled": False,
            "ip_whitelist": [],
            "ip_blacklist": [],
            "forbidden_patterns": [
                "<script", "javascript:", "data:text/html", "vbscript:",
                "union select", "drop table", "insert into", "delete from"
            ],
            "audit": {
                "enabled": True,
                "include_request_body": False,
                "include_response_body": False,
                "sensitive_paths": ["/api/platform/v1/auth"]
            }
        }


class RequestValidator:
    """Validates API requests against schemas and security policies"""

    def __init__(self, config: SecurityConfiguration):
        self.config = config
        self.schemas = self._load_openapi_schemas()

    def _load_openapi_schemas(self) -> Dict[str, Any]:
        """Load OpenAPI schemas for validation"""
        # Would load actual OpenAPI schemas in production
        return {
            "POST /api/llm/v1/chat/completions": {
                "requestBody": {
                    "type": "object",
                    "required": ["model", "messages"],
                    "properties": {
                        "model": {"type": "string"},
                        "messages": {"type": "array"},
                        "temperature": {"type": "number", "minimum": 0, "maximum": 2},
                        "max_tokens": {"type": "integer", "minimum": 1, "maximum": 32000}
                    }
                }
            },
            "POST /api/modules/v1/rag/search": {
                "requestBody": {
                    "type": "object",
                    "required": ["query"],
                    "properties": {
                        "query": {"type": "string", "maxLength": 1000},
                        "limit": {"type": "integer", "minimum": 1, "maximum": 100}
                    }
                }
            }
        }

    async def validate(self, path: str, method: str, body: Dict, headers: Dict) -> Dict:
        """Validate request against schema and security policies"""

        # Check request size
        body_str = json.dumps(body)
        if len(body_str.encode()) > self.config.config["max_request_size"]:
            raise ValidationError("Request size exceeds maximum allowed")

        # Check required headers
        for header in self.config.config["required_headers"]:
            if header not in headers:
                raise ValidationError(f"Missing required header: {header}")

        # Validate against schema if available
        schema_key = f"{method.upper()} {path}"
        if schema_key in self.schemas:
            await self._validate_against_schema(body, self.schemas[schema_key])

        # Security validation
        self._validate_security_patterns(body)

        return body

    async def _validate_against_schema(self, body: Dict, schema: Dict):
        """Validate request body against OpenAPI schema"""
        request_schema = schema.get("requestBody", {})

        # Basic validation (would use proper JSON schema validator in production)
        if "required" in request_schema:
            for field in request_schema["required"]:
                if field not in body:
                    raise ValidationError(f"Missing required field: {field}")

        if "properties" in request_schema:
            for field, constraints in request_schema["properties"].items():
                if field in body:
                    await self._validate_field(field, body[field], constraints)

    async def _validate_field(self, field_name: str, value: Any, constraints: Dict):
        """Validate individual field against constraints"""
        field_type = constraints.get("type")

        if field_type == "string":
            if not isinstance(value, str):
                raise ValidationError(f"Field {field_name} must be a string")
            if "maxLength" in constraints and len(value) > constraints["maxLength"]:
                raise ValidationError(f"Field {field_name} exceeds maximum length")

        elif field_type == "integer":
            if not isinstance(value, int):
                raise ValidationError(f"Field {field_name} must be an integer")
            if "minimum" in constraints and value < constraints["minimum"]:
                raise ValidationError(f"Field {field_name} below minimum value")
            if "maximum" in constraints and value > constraints["maximum"]:
                raise ValidationError(f"Field {field_name} exceeds maximum value")

        elif field_type == "number":
            if not isinstance(value, (int, float)):
                raise ValidationError(f"Field {field_name} must be a number")
            if "minimum" in constraints and value < constraints["minimum"]:
                raise ValidationError(f"Field {field_name} below minimum value")
            if "maximum" in constraints and value > constraints["maximum"]:
                raise ValidationError(f"Field {field_name} exceeds maximum value")

    def _validate_security_patterns(self, body: Dict):
        """Check for forbidden security patterns"""
        body_str = json.dumps(body).lower()

        for pattern in self.config.config["forbidden_patterns"]:
            if pattern.lower() in body_str:
                raise ValidationError(f"Request contains forbidden pattern: {pattern}")
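

# Illustrative sketch (not part of the original proxy file): exercising the
# validator above directly. The chat-completions schema requires "model" and
# "messages", and the forbidden-pattern check rejects script injection.
async def _example_request_validation() -> None:
    validator = RequestValidator(SecurityConfiguration())

    # Passes: required header and schema fields present, no forbidden patterns.
    await validator.validate(
        path="/api/llm/v1/chat/completions",
        method="POST",
        body={"model": "gpt-4", "messages": [{"role": "user", "content": "hi"}]},
        headers={"X-API-Key": "sk-test"},
    )

    # Raises ValidationError: "<script" is on the forbidden-pattern list.
    try:
        await validator.validate(
            path="/api/llm/v1/chat/completions",
            method="POST",
            body={"model": "gpt-4", "messages": [{"role": "user", "content": "<script>hi"}]},
            headers={"X-API-Key": "sk-test"},
        )
    except ValidationError:
        pass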


class APISecurityProxy:
    """Main API security proxy with interceptor pattern"""

    def __init__(self):
        self.config = SecurityConfiguration()
        self.request_validator = RequestValidator(self.config)

    async def proxy_request(self, request: Request, path: str) -> Response:
        """
        Main proxy method that implements the full interceptor pattern
        """
        start_time = time.time()
        api_key_info = None
        user_permissions = []

        try:
            # 1. Extract and validate API key
            api_key_info = await self._extract_and_validate_api_key(request)
            if api_key_info:
                user_permissions = api_key_info.get("permissions", [])

            # 2. IP validation (if enabled)
            await self._validate_ip_address(request)

            # 3. Rate limiting
            await self._check_rate_limits(request, path, api_key_info)

            # 4. Request validation and sanitization
            request_body = await self._get_request_body(request)
            validated_body = await self.request_validator.validate(
                path=path,
                method=request.method,
                body=request_body,
                headers=dict(request.headers)
            )

            # 5. Sanitize request
            sanitized_body = self._sanitize_request(validated_body)

            # 6. Budget checking (for LLM endpoints)
            if path.startswith("/api/llm/"):
                await self._check_budget_constraints(api_key_info, sanitized_body)

            # 7. Build proxy headers
            proxy_headers = self._build_proxy_headers(request, api_key_info)

            # 8. Log security event
            await self._log_security_event(
                request=request,
                path=path,
                api_key_info=api_key_info,
                sanitized_body=sanitized_body
            )

            # 9. Forward request to appropriate backend
            response = await self._forward_request(
                path=path,
                method=request.method,
                body=sanitized_body,
                headers=proxy_headers
            )

            # 10. Validate and sanitize response
            validated_response = await self._process_response(path, response)

            # 11. Record usage metrics
            await self._record_usage_metrics(
                api_key_info=api_key_info,
                path=path,
                duration=time.time() - start_time,
                success=True
            )

            return validated_response

        except Exception as e:
            # Error handling and logging
            await self._handle_error(
                request=request,
                path=path,
                api_key_info=api_key_info,
                error=e,
                duration=time.time() - start_time
            )

            # Return appropriate error response
            return await self._create_error_response(e)
async def _extract_and_validate_api_key(self, request: Request) -> Optional[Dict[str, Any]]:
|
||||
"""Extract and validate API key from request"""
|
||||
|
||||
# Try different auth methods
|
||||
api_key = None
|
||||
|
||||
# Bearer token
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
if auth_header.startswith("Bearer "):
|
||||
api_key = auth_header[7:]
|
||||
|
||||
# X-API-Key header
|
||||
elif request.headers.get("X-API-Key"):
|
||||
api_key = request.headers.get("X-API-Key")
|
||||
|
||||
if not api_key:
|
||||
raise AuthenticationError("Missing API key")
|
||||
|
||||
# Validate API key
|
||||
api_key_info = await get_api_key_info(api_key)
|
||||
if not api_key_info:
|
||||
raise AuthenticationError("Invalid API key")
|
||||
|
||||
if not api_key_info.get("is_active", False):
|
||||
raise AuthenticationError("API key is disabled")
|
||||
|
||||
return api_key_info
|
||||
|
||||

    async def _validate_ip_address(self, request: Request):
        """Validate client IP address against whitelist/blacklist"""
        client_ip = request.client.host
        forwarded_for = request.headers.get("X-Forwarded-For")
        if forwarded_for:
            client_ip = forwarded_for.split(",")[0].strip()

        config = self.config.config

        # Check blacklist
        if client_ip in config["ip_blacklist"]:
            raise AuthenticationError(f"IP address {client_ip} is blacklisted")

        # Check whitelist (if enabled)
        if config["ip_whitelist_enabled"] and client_ip not in config["ip_whitelist"]:
            raise AuthenticationError(f"IP address {client_ip} is not whitelisted")

    async def _check_rate_limits(self, request: Request, path: str, api_key_info: Optional[Dict]):
        """Check rate limits for the request"""
        client_ip = request.client.host
        api_key = api_key_info.get("key_prefix", "") if api_key_info else None

        # Use existing rate limiter
        if api_key:
            # API key-based rate limiting
            rate_limit_key = f"api_key:{api_key}"
            limit_per_minute = api_key_info.get("rate_limit_per_minute", 100)
            limit_per_hour = api_key_info.get("rate_limit_per_hour", 1000)

            # Check per-minute limit
            is_allowed_minute, _ = await rate_limiter.check_rate_limit(
                rate_limit_key, limit_per_minute, 60, "minute"
            )

            # Check per-hour limit
            is_allowed_hour, _ = await rate_limiter.check_rate_limit(
                rate_limit_key, limit_per_hour, 3600, "hour"
            )

            if not (is_allowed_minute and is_allowed_hour):
                raise RateLimitExceeded("API key rate limit exceeded")

        else:
            # IP-based rate limiting for unauthenticated requests
            rate_limit_key = f"ip:{client_ip}"

            is_allowed_minute, _ = await rate_limiter.check_rate_limit(
                rate_limit_key, 20, 60, "minute"
            )

            if not is_allowed_minute:
                raise RateLimitExceeded("IP rate limit exceeded")
async def _get_request_body(self, request: Request) -> Dict[str, Any]:
|
||||
"""Extract request body"""
|
||||
try:
|
||||
if request.method in ["POST", "PUT", "PATCH"]:
|
||||
return await request.json()
|
||||
else:
|
||||
return {}
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
def _sanitize_request(self, body: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Sanitize request data"""
|
||||
def sanitize_value(value):
|
||||
if isinstance(value, str):
|
||||
# Remove forbidden patterns
|
||||
for pattern in self.config.config["forbidden_patterns"]:
|
||||
value = re.sub(re.escape(pattern), "", value, flags=re.IGNORECASE)
|
||||
|
||||
# Limit string length
|
||||
max_length = self.config.config["max_string_length"]
|
||||
if len(value) > max_length:
|
||||
value = value[:max_length]
|
||||
logger.warning(f"Truncated long string in request: {len(value)} chars")
|
||||
|
||||
return value
|
||||
elif isinstance(value, dict):
|
||||
return {k: sanitize_value(v) for k, v in value.items()}
|
||||
elif isinstance(value, list):
|
||||
return [sanitize_value(item) for item in value]
|
||||
else:
|
||||
return value
|
||||
|
||||
return sanitize_value(body)
|
||||
|
||||

    async def _check_budget_constraints(self, api_key_info: Dict, body: Dict):
        """Check budget constraints for LLM requests"""
        if not api_key_info:
            return

        # Estimate cost based on request
        estimated_cost = self._estimate_request_cost(body)

        # Check budget
        user_id = api_key_info.get("user_id")
        api_key_id = api_key_info.get("id")

        budget_ok = await check_budget_and_record_usage(
            user_id=user_id,
            api_key_id=api_key_id,
            estimated_cost=estimated_cost,
            actual_cost=0,  # Will be updated after response
            metadata={"endpoint": "llm_proxy", "model": body.get("model", "unknown")}
        )

        if not budget_ok:
            raise HTTPException(
                status_code=status.HTTP_402_PAYMENT_REQUIRED,
                detail="Budget limit exceeded"
            )
def _estimate_request_cost(self, body: Dict) -> float:
|
||||
"""Estimate cost of LLM request"""
|
||||
# Rough estimation based on model and tokens
|
||||
model = body.get("model", "gpt-3.5-turbo")
|
||||
messages = body.get("messages", [])
|
||||
max_tokens = body.get("max_tokens", 1000)
|
||||
|
||||
# Estimate input tokens
|
||||
input_text = " ".join([msg.get("content", "") for msg in messages if isinstance(msg, dict)])
|
||||
input_tokens = len(input_text.split()) * 1.3 # Rough approximation
|
||||
|
||||
# Model pricing (simplified)
|
||||
pricing = {
|
||||
"gpt-4": {"input": 0.03, "output": 0.06}, # per 1K tokens
|
||||
"gpt-3.5-turbo": {"input": 0.001, "output": 0.002},
|
||||
"claude-3-sonnet": {"input": 0.003, "output": 0.015},
|
||||
"claude-3-haiku": {"input": 0.00025, "output": 0.00125}
|
||||
}
|
||||
|
||||
model_pricing = pricing.get(model, pricing["gpt-3.5-turbo"])
|
||||
|
||||
estimated_cost = (
|
||||
(input_tokens / 1000) * model_pricing["input"] +
|
||||
(max_tokens / 1000) * model_pricing["output"]
|
||||
)
|
||||
|
||||
return estimated_cost
|
||||
|
||||
def _build_proxy_headers(self, request: Request, api_key_info: Optional[Dict]) -> Dict[str, str]:
|
||||
"""Build headers for proxy request"""
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": f"ConfidentialEmpire-Proxy/1.0",
|
||||
"X-Forwarded-For": request.client.host,
|
||||
"X-Request-ID": f"req_{int(time.time() * 1000)}"
|
||||
}
|
||||
|
||||
if api_key_info:
|
||||
headers["X-User-ID"] = str(api_key_info.get("user_id", ""))
|
||||
headers["X-API-Key-ID"] = str(api_key_info.get("id", ""))
|
||||
|
||||
return headers
|
||||
|
||||

    async def _log_security_event(self, request: Request, path: str, api_key_info: Optional[Dict], sanitized_body: Dict):
        """Log security event for audit trail"""
        await create_audit_log(
            action=f"api_proxy_{request.method.lower()}",
            resource_type="api_endpoint",
            resource_id=path,
            user_id=api_key_info.get("user_id") if api_key_info else None,
            success=True,
            ip_address=request.client.host,
            user_agent=request.headers.get("User-Agent", ""),
            metadata={
                "endpoint": path,
                "method": request.method,
                "api_key_id": api_key_info.get("id") if api_key_info else None,
                "request_size": len(json.dumps(sanitized_body))
            }
        )
async def _forward_request(self, path: str, method: str, body: Dict, headers: Dict) -> Dict:
|
||||
"""Forward request to appropriate backend service"""
|
||||
|
||||
# Determine target service based on path
|
||||
if path.startswith("/api/llm/"):
|
||||
target_url = f"{settings.LITELLM_BASE_URL}{path}"
|
||||
target_headers = {**headers, "Authorization": f"Bearer {settings.LITELLM_MASTER_KEY}"}
|
||||
elif path.startswith("/api/modules/"):
|
||||
# Route to module system
|
||||
return await self._route_to_module(path, method, body, headers)
|
||||
else:
|
||||
raise ValidationError(f"Unknown endpoint: {path}")
|
||||
|
||||
# Make HTTP request to target service
|
||||
timeout = self.config.config["timeout"]
|
||||
|
||||
async with httpx.AsyncClient(timeout=timeout) as client:
|
||||
if method == "GET":
|
||||
response = await client.get(target_url, headers=target_headers)
|
||||
elif method == "POST":
|
||||
response = await client.post(target_url, json=body, headers=target_headers)
|
||||
elif method == "PUT":
|
||||
response = await client.put(target_url, json=body, headers=target_headers)
|
||||
elif method == "DELETE":
|
||||
response = await client.delete(target_url, headers=target_headers)
|
||||
else:
|
||||
raise ValidationError(f"Unsupported HTTP method: {method}")
|
||||
|
||||
if response.status_code >= 400:
|
||||
raise HTTPException(status_code=response.status_code, detail=response.text)
|
||||
|
||||
return response.json()
|
||||
|
||||
async def _route_to_module(self, path: str, method: str, body: Dict, headers: Dict) -> Dict:
|
||||
"""Route request to module system"""
|
||||
# Extract module name from path
|
||||
# e.g., /api/modules/v1/rag/search -> module: rag, action: search
|
||||
path_parts = path.strip("/").split("/")
|
||||
if len(path_parts) >= 4:
|
||||
module_name = path_parts[3]
|
||||
action = path_parts[4] if len(path_parts) > 4 else "execute"
|
||||
else:
|
||||
raise ValidationError("Invalid module path")
|
||||
|
||||
# Import module manager
|
||||
from app.services.module_manager import module_manager
|
||||
|
||||
if module_name not in module_manager.modules:
|
||||
raise ValidationError(f"Module not found: {module_name}")
|
||||
|
||||
module = module_manager.modules[module_name]
|
||||
|
||||
# Prepare context
|
||||
context = {
|
||||
"user_id": headers.get("X-User-ID"),
|
||||
"api_key_id": headers.get("X-API-Key-ID"),
|
||||
"ip_address": headers.get("X-Forwarded-For"),
|
||||
"user_permissions": [] # Would be populated from API key info
|
||||
}
|
||||
|
||||
# Prepare request
|
||||
module_request = {
|
||||
"action": action,
|
||||
"method": method,
|
||||
**body
|
||||
}
|
||||
|
||||
# Execute through module's interceptor chain
|
||||
if hasattr(module, 'execute_with_interceptors'):
|
||||
return await module.execute_with_interceptors(module_request, context)
|
||||
else:
|
||||
# Fallback for legacy modules
|
||||
if hasattr(module, action):
|
||||
return await getattr(module, action)(module_request)
|
||||
else:
|
||||
raise ValidationError(f"Action not supported: {action}")
|
||||
|
||||
async def _process_response(self, path: str, response: Dict) -> JSONResponse:
|
||||
"""Process and validate response"""
|
||||
# Add security headers
|
||||
headers = {
|
||||
"X-Content-Type-Options": "nosniff",
|
||||
"X-Frame-Options": "DENY",
|
||||
"X-XSS-Protection": "1; mode=block",
|
||||
"Strict-Transport-Security": "max-age=31536000; includeSubDomains"
|
||||
}
|
||||
|
||||
return JSONResponse(content=response, headers=headers)
|
||||
|
||||

    async def _record_usage_metrics(self, api_key_info: Optional[Dict], path: str, duration: float, success: bool):
        """Record usage metrics"""
        if api_key_info:
            # Record API key usage
            # This would update database metrics
            pass

    async def _handle_error(self, request: Request, path: str, api_key_info: Optional[Dict], error: Exception, duration: float):
        """Handle and log errors"""
        await create_audit_log(
            action=f"api_proxy_{request.method.lower()}",
            resource_type="api_endpoint",
            resource_id=path,
            user_id=api_key_info.get("user_id") if api_key_info else None,
            success=False,
            error_message=str(error),
            ip_address=request.client.host,
            user_agent=request.headers.get("User-Agent", ""),
            metadata={
                "endpoint": path,
                "method": request.method,
                "duration_ms": int(duration * 1000),
                "error_type": type(error).__name__
            }
        )
async def _create_error_response(self, error: Exception) -> JSONResponse:
|
||||
"""Create appropriate error response"""
|
||||
if isinstance(error, AuthenticationError):
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
content={"error": "AUTHENTICATION_ERROR", "message": str(error)}
|
||||
)
|
||||
elif isinstance(error, ValidationError):
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={"error": "VALIDATION_ERROR", "message": str(error)}
|
||||
)
|
||||
elif isinstance(error, RateLimitExceeded):
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
|
||||
content={"error": "RATE_LIMIT_EXCEEDED", "message": str(error)}
|
||||
)
|
||||
elif isinstance(error, HTTPException):
|
||||
return JSONResponse(
|
||||
status_code=error.status_code,
|
||||
content={"error": "HTTP_ERROR", "message": error.detail}
|
||||
)
|
||||
else:
|
||||
logger.error(f"Unexpected error in API proxy: {error}")
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
content={"error": "INTERNAL_ERROR", "message": "An unexpected error occurred"}
|
||||
)
|
||||
|
||||
|
||||
# Global proxy instance
|
||||
api_security_proxy = APISecurityProxy()
|
||||
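

# Illustrative sketch (not part of the original file): exercising two of the
# proxy helpers above. Dollar figures follow the simplified pricing table in
# _estimate_request_cost and are rough estimates, not real provider prices.
def _example_proxy_helpers() -> None:
    proxy = APISecurityProxy()

    # Sanitization strips forbidden substrings recursively through nested
    # dicts and lists rather than rejecting the request outright.
    cleaned = proxy._sanitize_request(
        {"messages": [{"content": "hi <script>alert(1)</script>"}]}
    )
    assert "<script" not in cleaned["messages"][0]["content"]

    # Cost estimate for a 4-word prompt (~5.2 input tokens) on gpt-4 with
    # max_tokens=1000: (5.2 / 1000) * 0.03 + (1000 / 1000) * 0.06 ~= $0.0602.
    cost = proxy._estimate_request_cost(
        {"model": "gpt-4", "messages": [{"role": "user", "content": "a b c d"}], "max_tokens": 1000}
    )
    assert 0.055 < cost < 0.065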
@@ -355,17 +355,7 @@ class PluginConfigurationManager:
    ) -> PluginConfiguration:
        """Save plugin configuration with automatic encryption of sensitive fields"""

        # Validate configuration against schema
        is_valid, errors = await self.schema_manager.validate_configuration(plugin_id, config_data, db)
        if not is_valid:
            raise PluginError(f"Configuration validation failed: {', '.join(errors)}")

        # Process fields (separate sensitive from non-sensitive)
        non_sensitive, encrypted_sensitive = await self.schema_manager.process_configuration_fields(
            plugin_id, config_data, db, encrypt_sensitive=True
        )

        # Check for existing configuration
        # Check for existing configuration to handle empty sensitive fields
        plugin_uuid = self.schema_manager._ensure_uuid(plugin_id)
        stmt = select(PluginConfiguration).where(
            PluginConfiguration.plugin_id == plugin_uuid,
@@ -375,6 +365,116 @@ class PluginConfigurationManager:
        result = await db.execute(stmt)
        existing_config = result.scalar_one_or_none()

        # Handle validation for existing vs new configurations
        validation_passed = False

        if existing_config:
            # For existing configurations, use relaxed validation for empty sensitive fields
            try:
                # Try to get existing decrypted configuration
                existing_data = await self.get_plugin_configuration(plugin_id, user_id, db, decrypt_sensitive=True)

                if existing_data:
                    # Successfully decrypted - preserve existing sensitive fields
                    schema = await self.schema_manager.get_plugin_schema(plugin_id, db)
                    if schema:
                        sensitive_fields = self.schema_manager.encryption.identify_sensitive_fields(schema)
                        validation_config = config_data.copy()

                        for field in sensitive_fields:
                            if not validation_config.get(field) or str(validation_config.get(field)).strip() == '':
                                if existing_data.get(field):
                                    validation_config[field] = existing_data[field]

                        # Validate with complete config
                        is_valid, errors = await self.schema_manager.validate_configuration(plugin_id, validation_config, db)
                        if is_valid:
                            validation_passed = True
                        else:
                            raise PluginError(f"Configuration validation failed: {', '.join(errors)}")

            except Exception as e:
                # Decryption failed - use relaxed validation for updates
                self.logger.warning(f"Using relaxed validation due to decryption error: {e}")

                schema = await self.schema_manager.get_plugin_schema(plugin_id, db)
                if schema:
                    # Create relaxed schema that allows empty sensitive fields for existing configs
                    relaxed_schema = schema.copy()
                    sensitive_fields = self.schema_manager.encryption.identify_sensitive_fields(schema)

                    for field in sensitive_fields:
                        if field in relaxed_schema.get("properties", {}):
                            field_props = relaxed_schema["properties"][field]
                            # If field is empty, relax validation requirements
                            if not config_data.get(field) or str(config_data.get(field)).strip() == '':
                                # Remove minLength and other constraints
                                field_props.pop("minLength", None)
                                field_props.pop("pattern", None)
                                # Make it optional
                                if "required" in relaxed_schema and field in relaxed_schema["required"]:
                                    relaxed_schema["required"] = [r for r in relaxed_schema["required"] if r != field]

                    # Validate with relaxed schema
                    try:
                        jsonschema.validate(config_data, relaxed_schema)
                        validation_passed = True
                        validation_config = config_data
                    except jsonschema.ValidationError as ve:
                        raise PluginError(f"Configuration validation failed: {ve}")
        else:
            # New configuration - full validation required
            is_valid, errors = await self.schema_manager.validate_configuration(plugin_id, config_data, db)
            if is_valid:
                validation_passed = True
                validation_config = config_data
            else:
                raise PluginError(f"Configuration validation failed: {', '.join(errors)}")

        if not validation_passed:
            raise PluginError("Configuration validation failed")

        # Handle encryption for new vs existing configurations
        if existing_config and existing_config.encrypted_data:
            # For existing configs, preserve encrypted data for fields not provided
            try:
                existing_encrypted = json.loads(existing_config.encrypted_data)
            except (json.JSONDecodeError, TypeError):
                existing_encrypted = {}

            # Identify which sensitive fields are actually being updated
            schema = await self.schema_manager.get_plugin_schema(plugin_id, db)
            sensitive_fields = self.schema_manager.encryption.identify_sensitive_fields(schema) if schema else []

            # Process only fields that have new values
            fields_to_encrypt = {}
            for field in sensitive_fields:
                if config_data.get(field) and str(config_data.get(field)).strip():
                    # User provided new value - encrypt it
                    fields_to_encrypt[field] = config_data[field]

            # Encrypt new fields
            new_encrypted = {}
            if fields_to_encrypt:
                for field, value in fields_to_encrypt.items():
                    new_encrypted[field] = self.schema_manager.encryption.encrypt_value(str(value))

            # Combine existing and new encrypted data
            final_encrypted = {**existing_encrypted, **new_encrypted}

            # Process non-sensitive fields
            non_sensitive = {}
            for key, value in config_data.items():
                if key not in sensitive_fields:
                    non_sensitive[key] = value

            encrypted_sensitive = final_encrypted
        else:
            # New configuration or no existing encrypted data - process normally
            non_sensitive, encrypted_sensitive = await self.schema_manager.process_configuration_fields(
                plugin_id, validation_config, db, encrypt_sensitive=True
            )

        if existing_config:
            # Update existing configuration
            existing_config.config_data = non_sensitive
@@ -144,6 +144,57 @@ class ConfigurationError(Exception):
    pass


class PluginError(CustomHTTPException):
    """Plugin error"""

    def __init__(self, detail: str = "Plugin error", details: Optional[Dict[str, Any]] = None):
        super().__init__(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            error_code="PLUGIN_ERROR",
            detail=detail,
            details=details,
        )


class SecurityError(CustomHTTPException):
    """Security error"""

    def __init__(self, detail: str = "Security violation", details: Optional[Dict[str, Any]] = None):
        super().__init__(
            status_code=status.HTTP_403_FORBIDDEN,
            error_code="SECURITY_ERROR",
            detail=detail,
            details=details,
        )


class PluginLoadError(Exception):
    """Plugin load error"""
    pass


class PluginInstallError(Exception):
    """Plugin installation error"""
    pass


class PluginSecurityError(Exception):
    """Plugin security error"""
    pass


class DatabaseError(CustomHTTPException):
    """Database error"""

    def __init__(self, detail: str = "Database error", details: Optional[Dict[str, Any]] = None):
        super().__init__(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            error_code="DATABASE_ERROR",
            detail=detail,
            details=details,
        )


# Aliases for backwards compatibility
RateLimitExceeded = RateLimitError
APIException = CustomHTTPException  # Generic API exception alias
@@ -1,13 +0,0 @@
"""
Zammad Integration Module for Enclava Platform

AI-powered ticket summarization for the Zammad ticketing system.
Replaces Ollama with Enclava's chatbot system for enhanced security and flexibility.
"""

from .main import ZammadModule

__version__ = "1.0.0"
__author__ = "Enclava Platform"

__all__ = ["ZammadModule"]
File diff suppressed because it is too large
@@ -1,241 +0,0 @@
"""
Database models for Zammad Integration Module
"""

from datetime import datetime
from typing import Optional, Dict, Any
from enum import Enum
from sqlalchemy import Column, Integer, String, DateTime, Boolean, Text, JSON, ForeignKey, Index
from sqlalchemy.orm import relationship
from app.db.database import Base


class TicketState(str, Enum):
    """Zammad ticket state enumeration"""
    NEW = "new"
    OPEN = "open"
    PENDING_REMINDER = "pending reminder"
    PENDING_CLOSE = "pending close"
    CLOSED = "closed"
    MERGED = "merged"
    REMOVED = "removed"


class ProcessingStatus(str, Enum):
    """Ticket processing status"""
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
    SKIPPED = "skipped"


class ZammadTicket(Base):
    """Model for tracking Zammad tickets and their processing status"""

    __tablename__ = "zammad_tickets"

    # Primary key
    id = Column(Integer, primary_key=True, index=True)

    # Zammad ticket information
    zammad_ticket_id = Column(Integer, unique=True, index=True, nullable=False)
    ticket_number = Column(String, index=True, nullable=False)
    title = Column(String, nullable=False)
    state = Column(String, nullable=False)  # Zammad state
    priority = Column(String, nullable=True)
    customer_email = Column(String, nullable=True)

    # Processing information
    processing_status = Column(String, default=ProcessingStatus.PENDING.value, nullable=False)
    processed_at = Column(DateTime, nullable=True)
    processed_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=True)
    chatbot_id = Column(String, nullable=True)

    # Summary and context
    summary = Column(Text, nullable=True)
    context_data = Column(JSON, nullable=True)  # Original ticket data
    error_message = Column(Text, nullable=True)

    # Metadata
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Zammad specific metadata
    zammad_created_at = Column(DateTime, nullable=True)
    zammad_updated_at = Column(DateTime, nullable=True)
    zammad_article_count = Column(Integer, default=0, nullable=False)

    # Processing configuration snapshot
    config_snapshot = Column(JSON, nullable=True)  # Config used during processing

    # Relationships
    processed_by = relationship("User", foreign_keys=[processed_by_user_id])

    # Indexes for better query performance
    __table_args__ = (
        Index("idx_zammad_tickets_status_created", "processing_status", "created_at"),
        Index("idx_zammad_tickets_state_processed", "state", "processed_at"),
        Index("idx_zammad_tickets_user_status", "processed_by_user_id", "processing_status"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for API responses"""
        return {
            "id": self.id,
            "zammad_ticket_id": self.zammad_ticket_id,
            "ticket_number": self.ticket_number,
            "title": self.title,
            "state": self.state,
            "priority": self.priority,
            "customer_email": self.customer_email,
            "processing_status": self.processing_status,
            "processed_at": self.processed_at.isoformat() if self.processed_at else None,
            "processed_by_user_id": self.processed_by_user_id,
            "chatbot_id": self.chatbot_id,
            "summary": self.summary,
            "error_message": self.error_message,
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
            "zammad_created_at": self.zammad_created_at.isoformat() if self.zammad_created_at else None,
            "zammad_updated_at": self.zammad_updated_at.isoformat() if self.zammad_updated_at else None,
            "zammad_article_count": self.zammad_article_count
        }
class ZammadProcessingLog(Base):
|
||||
"""Model for logging Zammad processing activities"""
|
||||
|
||||
__tablename__ = "zammad_processing_logs"
|
||||
|
||||
# Primary key
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
|
||||
# Processing batch information
|
||||
batch_id = Column(String, index=True, nullable=False) # UUID for batch processing
|
||||
started_at = Column(DateTime, default=datetime.utcnow, nullable=False)
|
||||
completed_at = Column(DateTime, nullable=True)
|
||||
initiated_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=True)
|
||||
|
||||
# Processing configuration
|
||||
config_used = Column(JSON, nullable=True)
|
||||
filters_applied = Column(JSON, nullable=True) # State, limit, etc.
|
||||
|
||||
# Results
|
||||
tickets_found = Column(Integer, default=0, nullable=False)
|
||||
tickets_processed = Column(Integer, default=0, nullable=False)
|
||||
tickets_failed = Column(Integer, default=0, nullable=False)
|
||||
tickets_skipped = Column(Integer, default=0, nullable=False)
|
||||
|
||||
# Performance metrics
|
||||
processing_time_seconds = Column(Integer, nullable=True)
|
||||
average_time_per_ticket = Column(Integer, nullable=True) # milliseconds
|
||||
|
||||
# Error tracking
|
||||
errors_encountered = Column(JSON, nullable=True) # List of error messages
|
||||
status = Column(String, default="running", nullable=False) # running, completed, failed
|
||||
|
||||
# Relationships
|
||||
initiated_by = relationship("User", foreign_keys=[initiated_by_user_id])
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_processing_logs_batch_status", "batch_id", "status"),
|
||||
Index("idx_processing_logs_user_started", "initiated_by_user_id", "started_at"),
|
||||
)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert to dictionary for API responses"""
|
||||
return {
|
||||
"id": self.id,
|
||||
"batch_id": self.batch_id,
|
||||
"started_at": self.started_at.isoformat(),
|
||||
"completed_at": self.completed_at.isoformat() if self.completed_at else None,
|
||||
"initiated_by_user_id": self.initiated_by_user_id,
|
||||
"config_used": self.config_used,
|
||||
"filters_applied": self.filters_applied,
|
||||
"tickets_found": self.tickets_found,
|
||||
"tickets_processed": self.tickets_processed,
|
||||
"tickets_failed": self.tickets_failed,
|
||||
"tickets_skipped": self.tickets_skipped,
|
||||
"processing_time_seconds": self.processing_time_seconds,
|
||||
"average_time_per_ticket": self.average_time_per_ticket,
|
||||
"errors_encountered": self.errors_encountered,
|
||||
"status": self.status
|
||||
}
|
||||
|
||||
|
||||
class ZammadConfiguration(Base):
|
||||
"""Model for storing Zammad module configurations per user"""
|
||||
|
||||
__tablename__ = "zammad_configurations"
|
||||
|
||||
# Primary key
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
|
||||
# User association
|
||||
user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
|
||||
|
||||
# Configuration name and description
|
||||
name = Column(String, nullable=False)
|
||||
description = Column(Text, nullable=True)
|
||||
is_default = Column(Boolean, default=False, nullable=False)
|
||||
is_active = Column(Boolean, default=True, nullable=False)
|
||||
|
||||
# Zammad connection settings
|
||||
zammad_url = Column(String, nullable=False)
|
||||
api_token_encrypted = Column(String, nullable=False) # Encrypted API token
|
||||
|
||||
# Processing settings
|
||||
chatbot_id = Column(String, nullable=False)
|
||||
process_state = Column(String, default="open", nullable=False)
|
||||
max_tickets = Column(Integer, default=10, nullable=False)
|
||||
skip_existing = Column(Boolean, default=True, nullable=False)
|
||||
auto_process = Column(Boolean, default=False, nullable=False)
|
||||
process_interval = Column(Integer, default=30, nullable=False) # minutes
|
||||
|
||||
# Customization
|
||||
summary_template = Column(Text, nullable=True)
|
||||
custom_settings = Column(JSON, nullable=True) # Additional custom settings
|
||||
|
||||
# Metadata
|
||||
created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
|
||||
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
|
||||
last_used_at = Column(DateTime, nullable=True)
|
||||
|
||||
# Relationships
|
||||
user = relationship("User", foreign_keys=[user_id])
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
Index("idx_zammad_config_user_active", "user_id", "is_active"),
|
||||
Index("idx_zammad_config_user_default", "user_id", "is_default"),
|
||||
)
|
||||
|
||||
def to_dict(self, include_sensitive: bool = False) -> Dict[str, Any]:
|
||||
"""Convert to dictionary for API responses"""
|
||||
result = {
|
||||
"id": self.id,
|
||||
"user_id": self.user_id,
|
||||
"name": self.name,
|
||||
"description": self.description,
|
||||
"is_default": self.is_default,
|
||||
"is_active": self.is_active,
|
||||
"zammad_url": self.zammad_url,
|
||||
"chatbot_id": self.chatbot_id,
|
||||
"process_state": self.process_state,
|
||||
"max_tickets": self.max_tickets,
|
||||
"skip_existing": self.skip_existing,
|
||||
"auto_process": self.auto_process,
|
||||
"process_interval": self.process_interval,
|
||||
"summary_template": self.summary_template,
|
||||
"custom_settings": self.custom_settings,
|
||||
"created_at": self.created_at.isoformat(),
|
||||
"updated_at": self.updated_at.isoformat(),
|
||||
"last_used_at": self.last_used_at.isoformat() if self.last_used_at else None
|
||||
}
|
||||
|
||||
if include_sensitive:
|
||||
result["api_token_encrypted"] = self.api_token_encrypted
|
||||
|
||||
return result
|
||||
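For orientation, here is a minimal usage sketch for the models above. It assumes a standard SQLAlchemy session is available; the helper name is illustrative, not part of the removed module:

```python
# Hypothetical usage of the removed models; `session` is an ordinary
# SQLAlchemy Session, and this helper is not defined anywhere in the commit.
from datetime import datetime

def mark_ticket_completed(session, zammad_ticket_id: int, summary: str) -> dict:
    """Record a finished summarization run for one tracked ticket."""
    ticket = (
        session.query(ZammadTicket)
        .filter(ZammadTicket.zammad_ticket_id == zammad_ticket_id)
        .one()
    )
    ticket.processing_status = ProcessingStatus.COMPLETED.value
    ticket.processed_at = datetime.utcnow()
    ticket.summary = summary
    session.commit()
    return ticket.to_dict()  # ISO-formats the datetime fields for the API
```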
@@ -1,72 +0,0 @@
name: zammad
version: 1.0.0
description: "AI-powered ticket summarization for Zammad ticketing system"
author: "Enclava Team"
category: "integration"

# Module lifecycle
enabled: true
auto_start: true
dependencies:
  - chatbot
optional_dependencies: []

# Module capabilities
provides:
  - "ticket_summarization"
  - "zammad_integration"
  - "batch_processing"

consumes:
  - "chatbot_completion"
  - "llm_completion"

# API endpoints
endpoints:
  - path: "/zammad/configurations"
    method: "GET"
    description: "List Zammad configurations"

  - path: "/zammad/configurations"
    method: "POST"
    description: "Create new Zammad configuration"

  - path: "/zammad/test-connection"
    method: "POST"
    description: "Test Zammad connection"

  - path: "/zammad/process"
    method: "POST"
    description: "Process tickets for summarization"

  - path: "/zammad/status"
    method: "GET"
    description: "Get processing status and statistics"

# UI Configuration
ui_config:
  icon: "ticket"
  color: "#3B82F6"
  category: "Integration"

# Permissions
permissions:
  - name: "zammad.read"
    description: "Read Zammad tickets and configurations"

  - name: "zammad.write"
    description: "Create and update Zammad ticket summaries"

  - name: "zammad.configure"
    description: "Configure Zammad integration settings"

  - name: "chatbot.use"
    description: "Use chatbot for AI summarization"

# Health checks
health_checks:
  - name: "zammad_connectivity"
    description: "Check Zammad API connection"

  - name: "chatbot_availability"
    description: "Check chatbot module availability"
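A short sketch of how a manifest like this could be loaded and sanity-checked, assuming PyYAML is available; `validate_manifest` is a hypothetical helper, not the project's actual module loader:

```python
# Illustrative manifest check; yaml here is PyYAML, and validate_manifest
# is a hypothetical helper rather than enclava's real module loader.
import yaml

REQUIRED_KEYS = {"name", "version", "description", "dependencies"}

def validate_manifest(path: str) -> dict:
    """Load a module.yaml and verify the fields a loader would rely on."""
    with open(path, "r", encoding="utf-8") as fh:
        manifest = yaml.safe_load(fh)
    missing = REQUIRED_KEYS - manifest.keys()
    if missing:
        raise ValueError(f"module.yaml missing keys: {sorted(missing)}")
    return manifest
```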
@@ -1,206 +0,0 @@
#!/usr/bin/env python3
"""
Ollama Integration Test
Tests that Ollama models work properly through the LiteLLM proxy
"""

import asyncio
import aiohttp
import json
import time

# Ollama models from litellm_config.yaml
OLLAMA_MODELS = [
    "ollama-llama-3.1-nemotron",
    "ollama-mistral-nemo",
    "ollama-gemini-2.0-flash",
    "ollama-qwen3-235b",
    "ollama-deepseek-r1",
    "ollama-mistral-small",
    "ollama-gemini-2.5-pro"
]

async def test_ollama_integration():
    async with aiohttp.ClientSession() as session:
        try:
            # Register and login a test user
            timestamp = int(time.time())
            user_data = {
                "email": f"ollamatest{timestamp}@example.com",
                "password": "TestPassword123!",
                "username": f"ollamatest{timestamp}"
            }

            print("🚀 Starting Ollama Integration Test")
            print("=" * 50)

            # Register user
            async with session.post("http://localhost:58000/api/v1/auth/register", json=user_data) as response:
                if response.status != 201:
                    error_data = await response.json()
                    print(f"❌ User registration failed: {error_data}")
                    return
                print("✅ User registered successfully")

            # Login
            login_data = {"email": user_data["email"], "password": user_data["password"]}
            async with session.post("http://localhost:58000/api/v1/auth/login", json=login_data) as response:
                if response.status != 200:
                    error_data = await response.json()
                    print(f"❌ Login failed: {error_data}")
                    return

                login_result = await response.json()
                token = login_result['access_token']
                headers = {'Authorization': f'Bearer {token}'}
                print("✅ Login successful")

            # Test 1: Check if Ollama models are listed
            print("\n📋 Testing model availability...")
            async with session.get("http://localhost:58000/api/v1/llm/models", headers=headers) as response:
                if response.status == 200:
                    models_data = await response.json()
                    available_models = [model.get('id', '') for model in models_data.get('data', [])]

                    ollama_available = [model for model in OLLAMA_MODELS if model in available_models]
                    print(f"✅ Total models available: {len(available_models)}")
                    print(f"✅ Ollama models available: {len(ollama_available)}")

                    if not ollama_available:
                        print("❌ No Ollama models found in model list")
                        return

                    for model in ollama_available:
                        print(f"  • {model}")
                else:
                    error_data = await response.json()
                    print(f"❌ Failed to get models: {error_data}")
                    return

            # Test 2: Test chat completions with each available Ollama model
            print(f"\n💬 Testing chat completions...")
            successful_models = []
            failed_models = []

            test_messages = [
                {"role": "user", "content": "Say 'Hello from Ollama!' and nothing else."}
            ]

            for model in ollama_available[:3]:  # Test first 3 models to avoid timeout
                print(f"\n🤖 Testing model: {model}")

                chat_data = {
                    "model": model,
                    "messages": test_messages,
                    "max_tokens": 50,
                    "temperature": 0.1
                }

                try:
                    async with session.post(
                        "http://localhost:58000/api/v1/llm/chat/completions",
                        json=chat_data,
                        headers=headers,
                        timeout=aiohttp.ClientTimeout(total=30)
                    ) as chat_response:
                        if chat_response.status == 200:
                            chat_result = await chat_response.json()
                            message = chat_result.get("choices", [{}])[0].get("message", {}).get("content", "")
                            print(f"  ✅ Response: {message.strip()[:100]}...")
                            successful_models.append(model)
                        else:
                            error_data = await chat_response.json()
                            print(f"  ❌ Failed (HTTP {chat_response.status}): {error_data.get('detail', 'Unknown error')}")
                            failed_models.append(model)

                except asyncio.TimeoutError:
                    print(f"  ⏰ Timeout - model may be loading or unavailable")
                    failed_models.append(model)
                except Exception as e:
                    print(f"  ❌ Error: {str(e)}")
                    failed_models.append(model)

                # Small delay between requests
                await asyncio.sleep(1)

            # Test 3: Test streaming response (if supported)
            print(f"\n🌊 Testing streaming response...")
            if successful_models:
                test_model = successful_models[0]
                stream_data = {
                    "model": test_model,
                    "messages": [{"role": "user", "content": "Count from 1 to 3, one number per line."}],
                    "max_tokens": 20,
                    "stream": True
                }

                try:
                    async with session.post(
                        "http://localhost:58000/api/v1/llm/chat/completions",
                        json=stream_data,
                        headers=headers,
                        timeout=aiohttp.ClientTimeout(total=20)
                    ) as stream_response:
                        if stream_response.status == 200:
                            content = await stream_response.text()
                            if "data:" in content:
                                print(f"  ✅ Streaming response received (partial): {content[:100]}...")
                            else:
                                print(f"  ℹ️ Non-streaming response: {content[:100]}...")
                        else:
                            error_data = await stream_response.json()
                            print(f"  ❌ Streaming failed: {error_data}")
                except Exception as e:
                    print(f"  ❌ Streaming error: {str(e)}")

            # Test 4: Test model with different parameters
            print(f"\n⚙️ Testing model parameters...")
            if successful_models:
                test_model = successful_models[0]
                param_tests = [
                    {"temperature": 0.0, "max_tokens": 10},
                    {"temperature": 0.8, "max_tokens": 30},
                ]

                for i, params in enumerate(param_tests):
                    chat_data = {
                        "model": test_model,
                        "messages": [{"role": "user", "content": f"Test {i+1}: Say hello briefly."}],
                        **params
                    }

                    try:
                        async with session.post(
                            "http://localhost:58000/api/v1/llm/chat/completions",
                            json=chat_data,
                            headers=headers,
                            timeout=aiohttp.ClientTimeout(total=15)
                        ) as response:
                            if response.status == 200:
                                result = await response.json()
                                message = result.get("choices", [{}])[0].get("message", {}).get("content", "")
                                print(f"  ✅ Params {params}: {message.strip()[:50]}...")
                            else:
                                print(f"  ❌ Params test failed: HTTP {response.status}")
                    except Exception as e:
                        print(f"  ❌ Parameters test error: {str(e)}")

            # Summary
            print(f"\n📊 Test Summary")
            print("=" * 50)
            print(f"✅ Successful models: {len(successful_models)}")
            for model in successful_models:
                print(f"  • {model}")

            if failed_models:
                print(f"❌ Failed models: {len(failed_models)}")
                for model in failed_models:
                    print(f"  • {model}")

            print(f"\n{'🎉 Ollama integration working!' if successful_models else '⚠️ Ollama integration has issues'}")

        except Exception as e:
            print(f"❌ Test error: {e}")

if __name__ == "__main__":
    asyncio.run(test_ollama_integration())
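The streaming check in Test 3 only confirms that a `data:` marker appears somewhere in the buffered body. Assuming the proxy emits OpenAI-style SSE chunks terminated by `data: [DONE]` (which this test does not itself verify), the content deltas could be decoded like this:

```python
# Sketch of decoding an OpenAI-style SSE body; assumes "data: {json}" lines
# terminated by "data: [DONE]", an assumption the test above never checks.
import json

def extract_stream_text(raw_body: str) -> str:
    """Concatenate content deltas from a buffered SSE response body."""
    parts = []
    for line in raw_body.splitlines():
        line = line.strip()
        if not line.startswith("data:"):
            continue  # skip event names, comments, and blank keep-alives
        payload = line[len("data:"):].strip()
        if payload == "[DONE]":
            break
        chunk = json.loads(payload)
        delta = chunk.get("choices", [{}])[0].get("delta", {})
        parts.append(delta.get("content") or "")
    return "".join(parts)
```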
@@ -26,8 +26,6 @@ services:
      - DATABASE_URL=postgresql://enclava_user:enclava_pass@enclava-postgres:5432/enclava_db
      - REDIS_URL=redis://enclava-redis:6379
      - QDRANT_HOST=enclava-qdrant
      - LITELLM_BASE_URL=http://litellm-proxy:4000
      - LITELLM_MASTER_KEY=${LITELLM_MASTER_KEY:-enclava-master-key}
      - JWT_SECRET=${JWT_SECRET:-your-jwt-secret-here}
      - PRIVATEMODE_API_KEY=${PRIVATEMODE_API_KEY:-}
      - ADMIN_USER=${ADMIN_USER:-admin}
@@ -44,6 +42,7 @@ services:
    volumes:
      - ./backend:/app
      - ./logs:/app/logs
      - ./plugins:/plugins
    networks:
      - enclava-net
    restart: unless-stopped
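The `${VAR:-default}` entries above are interpolated by Docker Compose before the container starts. A rough sketch of the equivalent fallback behavior on the application side (the `Settings` class here is an assumption, not enclava's actual configuration module):

```python
# Illustrative mirror of the compose-side defaults above; this Settings
# class is an assumption, not enclava's real config code.
import os

class Settings:
    REDIS_URL = os.getenv("REDIS_URL", "redis://enclava-redis:6379")
    QDRANT_HOST = os.getenv("QDRANT_HOST", "enclava-qdrant")
    JWT_SECRET = os.getenv("JWT_SECRET", "your-jwt-secret-here")
    ADMIN_USER = os.getenv("ADMIN_USER", "admin")

settings = Settings()  # values resolve once, at import time
```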
@@ -6,6 +6,7 @@ import { Toaster } from '@/components/ui/toaster'
import { Toaster as HotToaster } from 'react-hot-toast'
import { AuthProvider } from '@/contexts/AuthContext'
import { ModulesProvider } from '@/contexts/ModulesContext'
import { PluginProvider } from '@/contexts/PluginContext'
import { Navigation } from '@/components/ui/navigation'

const inter = Inter({ subsets: ['latin'] })
@@ -53,6 +54,7 @@ export default function RootLayout({
        >
          <AuthProvider>
            <ModulesProvider>
              <PluginProvider>
                <div className="min-h-screen bg-background">
                  <Navigation />
                  <main className="container mx-auto px-4 py-8">
@@ -61,6 +63,7 @@ export default function RootLayout({
                </div>
                <Toaster />
                <HotToaster />
              </PluginProvider>
            </ModulesProvider>
          </AuthProvider>
        </ThemeProvider>
@@ -9,6 +9,7 @@ import { Badge } from "@/components/ui/badge"
import { ThemeToggle } from "@/components/ui/theme-toggle"
import { useAuth } from "@/contexts/AuthContext"
import { useModules } from "@/contexts/ModulesContext"
import { usePlugin } from "@/contexts/PluginContext"
import {
  DropdownMenu,
  DropdownMenuContent,
@@ -31,6 +32,34 @@ const Navigation = () => {
  const pathname = usePathname()
  const { user, logout } = useAuth()
  const { isModuleEnabled } = useModules()
  const { installedPlugins, getPluginPages } = usePlugin()

  // Get plugin navigation items
  const pluginNavItems = installedPlugins
    .filter(plugin => plugin.status === 'enabled' && plugin.loaded)
    .map(plugin => {
      const pages = getPluginPages(plugin.id);
      if (pages.length === 0) return null;

      if (pages.length === 1) {
        // Single page plugin
        return {
          href: `/plugins/${plugin.id}${pages[0].path}`,
          label: plugin.name
        };
      } else {
        // Multi-page plugin
        return {
          href: `/plugins/${plugin.id}`,
          label: plugin.name,
          children: pages.map(page => ({
            href: `/plugins/${plugin.id}${page.path}`,
            label: page.title || page.name
          }))
        };
      }
    })
    .filter(Boolean);

  // Core navigation items that are always visible
  const coreNavItems = [
@@ -49,6 +78,7 @@ const Navigation = () => {
      children: [
        { href: "/settings", label: "System Settings" },
        { href: "/modules", label: "Modules" },
        { href: "/plugins", label: "Plugins" },
        { href: "/prompt-templates", label: "Prompt Templates" },
      ]
    },
@@ -59,8 +89,8 @@ const Navigation = () => {
    .filter(([moduleName]) => isModuleEnabled(moduleName))
    .map(([, navItem]) => navItem)

  // Combine core and module-based navigation items
  const navItems = [...coreNavItems, ...moduleNavItems]
  // Combine core, module-based, and plugin navigation items
  const navItems = [...coreNavItems, ...moduleNavItems, ...pluginNavItems]

  return (
    <header className="sticky top-0 z-50 w-full border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60">
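The grouping rule added here — a flat link for a single-page plugin, a parent entry with children otherwise — restated as a standalone Python sketch (the plugin and page dicts are simplified stand-ins for the real PluginContext objects):

```python
# Standalone restatement of the plugin-nav grouping rule from navigation.tsx;
# plugins and pages are simplified dicts, not the real context objects.
def plugin_nav_items(plugins: list) -> list:
    items = []
    for plugin in plugins:
        if plugin["status"] != "enabled" or not plugin["loaded"]:
            continue
        pages = plugin["pages"]
        if not pages:
            continue  # plugins without pages contribute no nav entry
        if len(pages) == 1:
            # Single-page plugin: link straight to its only page
            items.append({
                "href": f"/plugins/{plugin['id']}{pages[0]['path']}",
                "label": plugin["name"],
            })
        else:
            # Multi-page plugin: parent entry plus one child per page
            items.append({
                "href": f"/plugins/{plugin['id']}",
                "label": plugin["name"],
                "children": [
                    {"href": f"/plugins/{plugin['id']}{p['path']}",
                     "label": p.get("title") or p["name"]}
                    for p in pages
                ],
            })
    return items
```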
@@ -12,6 +12,7 @@ interface User {

interface AuthContextType {
  user: User | null
  token: string | null
  login: (email: string, password: string) => Promise<void>
  logout: () => void
  isLoading: boolean
@@ -21,6 +22,7 @@ const AuthContext = createContext<AuthContextType | undefined>(undefined)

export function AuthProvider({ children }: { children: ReactNode }) {
  const [user, setUser] = useState<User | null>(null)
  const [token, setToken] = useState<string | null>(null)
  const [isLoading, setIsLoading] = useState(true)
  const [isMounted, setIsMounted] = useState(false)
  const router = useRouter()
@@ -30,8 +32,8 @@ export function AuthProvider({ children }: { children: ReactNode }) {

    // Check for existing session on mount (client-side only)
    if (typeof window !== "undefined") {
      const token = localStorage.getItem("token")
      if (token) {
      const storedToken = localStorage.getItem("token")
      if (storedToken) {
        // In a real app, validate the token with the backend
        // For now, just set a demo user - also handle both email domains
        setUser({
@@ -40,8 +42,11 @@ export function AuthProvider({ children }: { children: ReactNode }) {
          name: "Admin User",
          role: "admin"
        })
        // Ensure we have a fresh token
        localStorage.setItem("token", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwiZW1haWwiOiJhZG1pbkBleGFtcGxlLmNvbSIsImlzX3N1cGVydXNlciI6dHJ1ZSwicm9sZSI6InN1cGVyX2FkbWluIiwiZXhwIjoxNzU2MDEzNjk5fQ.qcpQfqO8E-0qQpla1nMwHUGF0Th25GLpmqGW5LO2I70")
        setToken(storedToken)
        // Ensure we have a fresh token with extended expiration
        const freshToken = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwiZW1haWwiOiJhZG1pbkBleGFtcGxlLmNvbSIsImlzX3N1cGVydXNlciI6dHJ1ZSwicm9sZSI6InN1cGVyX2FkbWluIiwiZXhwIjoxNzg3Mzg5NjM3fQ.DKAx-rpNvrlRxb0YG1C63QWDvH63pIAsi8QniFvDXmc"
        localStorage.setItem("token", freshToken)
        setToken(freshToken)
      }
    }
    setIsLoading(false)
@@ -60,10 +65,13 @@ export function AuthProvider({ children }: { children: ReactNode }) {
        role: "admin"
      }

      const authToken = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwiZW1haWwiOiJhZG1pbkBleGFtcGxlLmNvbSIsImlzX3N1cGVydXNlciI6dHJ1ZSwicm9sZSI6InN1cGVyX2FkbWluIiwiZXhwIjoxNzg3Mzg5NjM3fQ.DKAx-rpNvrlRxb0YG1C63QWDvH63pIAsi8QniFvDXmc"

      setUser(demoUser)
      setToken(authToken)
      if (typeof window !== "undefined") {
        // Use the actual JWT token for API calls
        localStorage.setItem("token", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwiZW1haWwiOiJhZG1pbkBleGFtcGxlLmNvbSIsImlzX3N1cGVydXNlciI6dHJ1ZSwicm9sZSI6InN1cGVyX2FkbWluIiwiZXhwIjoxNzU2MDEzNjk5fQ.qcpQfqO8E-0qQpla1nMwHUGF0Th25GLpmqGW5LO2I70")
        localStorage.setItem("token", authToken)
        localStorage.setItem("user", JSON.stringify(demoUser))
      }
    } else {
@@ -78,6 +86,7 @@ export function AuthProvider({ children }: { children: ReactNode }) {

  const logout = () => {
    setUser(null)
    setToken(null)
    if (typeof window !== "undefined") {
      localStorage.removeItem("token")
      localStorage.removeItem("user")
@@ -86,7 +95,7 @@ export function AuthProvider({ children }: { children: ReactNode }) {
  }

  return (
    <AuthContext.Provider value={{ user, login, logout, isLoading }}>
    <AuthContext.Provider value={{ user, token, login, logout, isLoading }}>
      {children}
    </AuthContext.Provider>
  )
@@ -99,6 +108,7 @@ export function useAuth() {
  if (typeof window === "undefined") {
    return {
      user: null,
      token: null,
      login: async () => {},
      logout: () => {},
      isLoading: true
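The two hard-coded demo tokens in this diff differ only in their payload claims. Decoding the payload (signature unverified, inspection only) shows why the swap happened: the old token's `exp` of 1756013699 had passed, while the replacement carries `exp` 1787389637. A small sketch of that inspection:

```python
# Decode a JWT payload without verifying the signature (inspection only).
import base64
import json

def jwt_claims(token: str) -> dict:
    payload = token.split(".")[1]
    payload += "=" * (-len(payload) % 4)  # restore stripped base64 padding
    return json.loads(base64.urlsafe_b64decode(payload))
```

As the in-code comments note, real validation of the stored token against the backend is still deferred; the hard-coded superuser token remains a demo placeholder.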