mirror of
https://github.com/aljazceru/enclava.git
synced 2025-12-17 07:24:34 +01:00
plugin system
This commit is contained in:
725
backend/alembic/versions/000_consolidated_ground_truth_schema.py
Normal file
725
backend/alembic/versions/000_consolidated_ground_truth_schema.py
Normal file
@@ -0,0 +1,725 @@
|
|||||||
|
"""Consolidated ground truth database schema
|
||||||
|
|
||||||
|
This migration represents the complete, accurate database schema based on the actual
|
||||||
|
model files in the codebase. All legacy migrations have been consolidated into this
|
||||||
|
single migration to ensure the database matches what the models expect.
|
||||||
|
|
||||||
|
Revision ID: 000_ground_truth
|
||||||
|
Revises:
|
||||||
|
Create Date: 2025-08-22 10:30:00.000000
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
# Revision identifiers, used by Alembic to build the migration graph.
# NOTE: downstream migrations reference this exact string in their
# down_revision, so it must not be changed.
revision = '000_ground_truth'
# This is the base (root) migration: no parent revision to apply first.
down_revision = None
branch_labels = None
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Create the complete database schema based on actual model definitions"""
|
||||||
|
|
||||||
|
# Create WorkflowStatus enum using raw SQL to avoid SQLAlchemy conflicts
|
||||||
|
op.execute("CREATE TYPE workflowstatus AS ENUM ('PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED')")
|
||||||
|
|
||||||
|
# Define the enum for use in table definitions
|
||||||
|
workflow_status_enum = postgresql.ENUM(
|
||||||
|
'PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED',
|
||||||
|
name='workflowstatus',
|
||||||
|
create_type=False # Don't auto-create since we created it above
|
||||||
|
)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# CORE USER MANAGEMENT
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create users table
|
||||||
|
op.create_table('users',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('email', sa.String(), nullable=False),
|
||||||
|
sa.Column('username', sa.String(), nullable=False),
|
||||||
|
sa.Column('hashed_password', sa.String(), nullable=False),
|
||||||
|
sa.Column('full_name', sa.String(), nullable=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('is_superuser', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('is_verified', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('role', sa.String(), nullable=True, default="user"),
|
||||||
|
sa.Column('permissions', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('avatar_url', sa.String(), nullable=True),
|
||||||
|
sa.Column('bio', sa.Text(), nullable=True),
|
||||||
|
sa.Column('company', sa.String(), nullable=True),
|
||||||
|
sa.Column('website', sa.String(), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_login', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('preferences', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('notification_settings', sa.JSON(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
|
||||||
|
op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# API KEY MANAGEMENT
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create api_keys table (based on actual model)
|
||||||
|
op.create_table('api_keys',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(), nullable=False),
|
||||||
|
sa.Column('key_hash', sa.String(), nullable=False),
|
||||||
|
sa.Column('key_prefix', sa.String(), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('permissions', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('scopes', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('rate_limit_per_minute', sa.Integer(), nullable=True, default=60),
|
||||||
|
sa.Column('rate_limit_per_hour', sa.Integer(), nullable=True, default=3600),
|
||||||
|
sa.Column('rate_limit_per_day', sa.Integer(), nullable=True, default=86400),
|
||||||
|
sa.Column('allowed_models', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('allowed_endpoints', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('allowed_ips', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('allowed_chatbots', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('is_unlimited', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('budget_limit_cents', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('budget_type', sa.String(), nullable=True),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('tags', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_used_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('expires_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('total_requests', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('total_tokens', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('total_cost', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_api_keys_id'), 'api_keys', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_api_keys_key_hash'), 'api_keys', ['key_hash'], unique=True)
|
||||||
|
op.create_index(op.f('ix_api_keys_key_prefix'), 'api_keys', ['key_prefix'], unique=False)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# BUDGET & USAGE TRACKING
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create budgets table (based on actual model)
|
||||||
|
op.create_table('budgets',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('api_key_id', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('limit_cents', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('warning_threshold_cents', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('period_type', sa.String(), nullable=False, default="monthly"),
|
||||||
|
sa.Column('period_start', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('period_end', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('current_usage_cents', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('is_exceeded', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('is_warning_sent', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('enforce_hard_limit', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('enforce_warning', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('allowed_models', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('allowed_endpoints', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('tags', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('currency', sa.String(), nullable=True, default="USD"),
|
||||||
|
sa.Column('auto_renew', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('rollover_unused', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('notification_settings', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_reset_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['api_key_id'], ['api_keys.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_budgets_id'), 'budgets', ['id'], unique=False)
|
||||||
|
|
||||||
|
# Create usage_tracking table (based on actual model)
|
||||||
|
op.create_table('usage_tracking',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('api_key_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('budget_id', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('endpoint', sa.String(), nullable=False),
|
||||||
|
sa.Column('method', sa.String(), nullable=False),
|
||||||
|
sa.Column('model', sa.String(), nullable=True),
|
||||||
|
sa.Column('request_tokens', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('response_tokens', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('total_tokens', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('cost_cents', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('cost_currency', sa.String(), nullable=True, default="USD"),
|
||||||
|
sa.Column('response_time_ms', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('status_code', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('request_id', sa.String(), nullable=True),
|
||||||
|
sa.Column('session_id', sa.String(), nullable=True),
|
||||||
|
sa.Column('ip_address', sa.String(), nullable=True),
|
||||||
|
sa.Column('user_agent', sa.String(), nullable=True),
|
||||||
|
sa.Column('request_metadata', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['api_key_id'], ['api_keys.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['budget_id'], ['budgets.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_usage_tracking_id'), 'usage_tracking', ['id'], unique=False)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# AUDIT SYSTEM
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create audit_logs table (based on actual model)
|
||||||
|
op.create_table('audit_logs',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('action', sa.String(), nullable=False),
|
||||||
|
sa.Column('resource_type', sa.String(), nullable=False),
|
||||||
|
sa.Column('resource_id', sa.String(), nullable=True),
|
||||||
|
sa.Column('description', sa.Text(), nullable=False),
|
||||||
|
sa.Column('details', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('ip_address', sa.String(), nullable=True),
|
||||||
|
sa.Column('user_agent', sa.String(), nullable=True),
|
||||||
|
sa.Column('session_id', sa.String(), nullable=True),
|
||||||
|
sa.Column('request_id', sa.String(), nullable=True),
|
||||||
|
sa.Column('severity', sa.String(), nullable=True, default="low"),
|
||||||
|
sa.Column('category', sa.String(), nullable=True),
|
||||||
|
sa.Column('success', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('error_message', sa.Text(), nullable=True),
|
||||||
|
sa.Column('tags', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('metadata', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('old_values', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('new_values', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_audit_logs_id'), 'audit_logs', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_audit_logs_created_at'), 'audit_logs', ['created_at'], unique=False)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# RAG SYSTEM
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create rag_collections table (based on actual model)
|
||||||
|
op.create_table('rag_collections',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('qdrant_collection_name', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('document_count', sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column('size_bytes', sa.BigInteger(), nullable=False, default=0),
|
||||||
|
sa.Column('vector_count', sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column('status', sa.String(length=50), nullable=False, default='active'),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||||
|
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_rag_collections_id'), 'rag_collections', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_rag_collections_name'), 'rag_collections', ['name'], unique=False)
|
||||||
|
op.create_index(op.f('ix_rag_collections_qdrant_collection_name'), 'rag_collections', ['qdrant_collection_name'], unique=True)
|
||||||
|
|
||||||
|
# Create rag_documents table (based on actual model)
|
||||||
|
op.create_table('rag_documents',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('collection_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('filename', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('original_filename', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('file_path', sa.String(length=500), nullable=False),
|
||||||
|
sa.Column('file_type', sa.String(length=50), nullable=False),
|
||||||
|
sa.Column('file_size', sa.BigInteger(), nullable=False),
|
||||||
|
sa.Column('mime_type', sa.String(length=100), nullable=True),
|
||||||
|
sa.Column('status', sa.String(length=50), nullable=False, default='processing'),
|
||||||
|
sa.Column('processing_error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('converted_content', sa.Text(), nullable=True),
|
||||||
|
sa.Column('word_count', sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column('character_count', sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column('vector_count', sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column('chunk_size', sa.Integer(), nullable=False, default=1000),
|
||||||
|
sa.Column('document_metadata', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||||
|
sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column('indexed_at', sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
|
||||||
|
sa.Column('is_deleted', sa.Boolean(), nullable=False, default=False),
|
||||||
|
sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['collection_id'], ['rag_collections.id'], ondelete='CASCADE'),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_rag_documents_id'), 'rag_documents', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_rag_documents_collection_id'), 'rag_documents', ['collection_id'], unique=False)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# CHATBOT SYSTEM (String IDs + JSON config)
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create chatbot_instances table (based on actual model - String IDs)
|
||||||
|
op.create_table('chatbot_instances',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('config', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('created_by', sa.String(), nullable=False),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create chatbot_conversations table (based on actual model - String IDs)
|
||||||
|
op.create_table('chatbot_conversations',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('chatbot_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('user_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('title', sa.String(length=255), nullable=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('context_data', sa.JSON(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['chatbot_id'], ['chatbot_instances.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create chatbot_messages table (based on actual model - String IDs)
|
||||||
|
op.create_table('chatbot_messages',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('conversation_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('role', sa.String(length=20), nullable=False),
|
||||||
|
sa.Column('content', sa.Text(), nullable=False),
|
||||||
|
sa.Column('timestamp', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('message_metadata', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('sources', sa.JSON(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['conversation_id'], ['chatbot_conversations.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create chatbot_analytics table (based on actual model)
|
||||||
|
op.create_table('chatbot_analytics',
|
||||||
|
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||||
|
sa.Column('chatbot_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('user_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('event_type', sa.String(length=50), nullable=False),
|
||||||
|
sa.Column('event_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('response_time_ms', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('token_count', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('cost_cents', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('model_used', sa.String(length=100), nullable=True),
|
||||||
|
sa.Column('rag_used', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('timestamp', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['chatbot_id'], ['chatbot_instances.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# PROMPT TEMPLATE SYSTEM (String IDs)
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create prompt_templates table (based on actual model - String IDs)
|
||||||
|
op.create_table('prompt_templates',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('type_key', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('system_prompt', sa.Text(), nullable=False),
|
||||||
|
sa.Column('is_default', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('version', sa.Integer(), nullable=False, default=1),
|
||||||
|
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_prompt_templates_id'), 'prompt_templates', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_prompt_templates_name'), 'prompt_templates', ['name'], unique=False)
|
||||||
|
op.create_index(op.f('ix_prompt_templates_type_key'), 'prompt_templates', ['type_key'], unique=True)
|
||||||
|
|
||||||
|
# Create prompt_variables table (based on actual model - String IDs)
|
||||||
|
op.create_table('prompt_variables',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('variable_name', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('example_value', sa.String(length=500), nullable=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_prompt_variables_id'), 'prompt_variables', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_prompt_variables_variable_name'), 'prompt_variables', ['variable_name'], unique=True)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# WORKFLOW SYSTEM (String IDs + Enum)
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create workflow_definitions table (based on actual model - String IDs)
|
||||||
|
op.create_table('workflow_definitions',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('version', sa.String(length=50), nullable=True, default="1.0.0"),
|
||||||
|
sa.Column('steps', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('variables', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('metadata', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('timeout', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
|
||||||
|
sa.Column('created_by', sa.String(), nullable=False),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create workflow_executions table (based on actual model - String IDs + Enum)
|
||||||
|
op.create_table('workflow_executions',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('workflow_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('status', workflow_status_enum, nullable=True),
|
||||||
|
sa.Column('current_step', sa.String(), nullable=True),
|
||||||
|
sa.Column('input_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('context', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('results', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('started_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('completed_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('executed_by', sa.String(), nullable=False),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['workflow_id'], ['workflow_definitions.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create workflow_step_logs table (based on actual model - String IDs)
|
||||||
|
op.create_table('workflow_step_logs',
|
||||||
|
sa.Column('id', sa.String(), nullable=False),
|
||||||
|
sa.Column('execution_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('step_id', sa.String(), nullable=False),
|
||||||
|
sa.Column('step_name', sa.String(length=255), nullable=False),
|
||||||
|
sa.Column('step_type', sa.String(length=50), nullable=False),
|
||||||
|
sa.Column('status', sa.String(length=50), nullable=False),
|
||||||
|
sa.Column('input_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('output_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('started_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('completed_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('duration_ms', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('retry_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['execution_id'], ['workflow_executions.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# MODULE SYSTEM
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create modules table (based on actual model)
|
||||||
|
op.create_table('modules',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(), nullable=False),
|
||||||
|
sa.Column('display_name', sa.String(), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('module_type', sa.String(), nullable=True, default="custom"),
|
||||||
|
sa.Column('category', sa.String(), nullable=True),
|
||||||
|
sa.Column('version', sa.String(), nullable=False),
|
||||||
|
sa.Column('author', sa.String(), nullable=True),
|
||||||
|
sa.Column('license', sa.String(), nullable=True),
|
||||||
|
sa.Column('status', sa.String(), nullable=True, default="inactive"),
|
||||||
|
sa.Column('is_enabled', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('is_core', sa.Boolean(), nullable=True, default=False),
|
||||||
|
sa.Column('config_schema', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('config_values', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('default_config', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('dependencies', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('conflicts', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('install_path', sa.String(), nullable=True),
|
||||||
|
sa.Column('entry_point', sa.String(), nullable=True),
|
||||||
|
sa.Column('interceptor_chains', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('execution_order', sa.Integer(), nullable=True, default=100),
|
||||||
|
sa.Column('api_endpoints', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('required_permissions', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('security_level', sa.String(), nullable=True, default="low"),
|
||||||
|
sa.Column('tags', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('module_metadata', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('last_error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('error_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('last_started', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_stopped', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('request_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('success_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('error_count_runtime', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('installed_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_modules_id'), 'modules', ['id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_modules_name'), 'modules', ['name'], unique=True)
|
||||||
|
|
||||||
|
# ========================================
|
||||||
|
# PLUGIN SYSTEM (UUID-based, comprehensive)
|
||||||
|
# ========================================
|
||||||
|
|
||||||
|
# Create plugins table (based on actual model - UUID)
|
||||||
|
op.create_table('plugins',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('slug', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('display_name', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('version', sa.String(length=50), nullable=False),
|
||||||
|
sa.Column('author', sa.String(length=200), nullable=True),
|
||||||
|
sa.Column('homepage', sa.String(length=500), nullable=True),
|
||||||
|
sa.Column('repository', sa.String(length=500), nullable=True),
|
||||||
|
sa.Column('package_path', sa.String(length=500), nullable=False),
|
||||||
|
sa.Column('manifest_hash', sa.String(length=64), nullable=False),
|
||||||
|
sa.Column('package_hash', sa.String(length=64), nullable=False),
|
||||||
|
sa.Column('status', sa.String(length=20), nullable=False, default="installed"),
|
||||||
|
sa.Column('enabled', sa.Boolean(), nullable=False, default=False),
|
||||||
|
sa.Column('auto_enable', sa.Boolean(), nullable=False, default=False),
|
||||||
|
sa.Column('installed_at', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('enabled_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('installed_by_user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('manifest_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('config_schema', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('default_config', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('required_permissions', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('api_scopes', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('resource_limits', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('database_name', sa.String(length=100), nullable=True),
|
||||||
|
sa.Column('database_url', sa.String(length=1000), nullable=True),
|
||||||
|
sa.Column('last_error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('error_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('last_error_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['installed_by_user_id'], ['users.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index('idx_plugin_status_enabled', 'plugins', ['status', 'enabled'], unique=False)
|
||||||
|
op.create_index('idx_plugin_user_status', 'plugins', ['installed_by_user_id', 'status'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugins_name'), 'plugins', ['name'], unique=True)
|
||||||
|
op.create_index(op.f('ix_plugins_slug'), 'plugins', ['slug'], unique=True)
|
||||||
|
op.create_index(op.f('ix_plugins_enabled'), 'plugins', ['enabled'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugins_status'), 'plugins', ['status'], unique=False)
|
||||||
|
op.create_unique_constraint(None, 'plugins', ['database_name'])
|
||||||
|
|
||||||
|
# Create plugin_configurations table (based on actual model - UUID)
|
||||||
|
op.create_table('plugin_configurations',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('plugin_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('config_data', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('encrypted_data', sa.Text(), nullable=True),
|
||||||
|
sa.Column('schema_version', sa.String(length=50), nullable=True),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=False, default=False),
|
||||||
|
sa.Column('is_default', sa.Boolean(), nullable=False, default=False),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('created_by_user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['created_by_user_id'], ['users.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['plugin_id'], ['plugins.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index('idx_plugin_config_user_active', 'plugin_configurations', ['plugin_id', 'user_id', 'is_active'], unique=False)
|
||||||
|
|
||||||
|
# Create plugin_instances table (based on actual model - UUID)
|
||||||
|
op.create_table('plugin_instances',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('plugin_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('configuration_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||||
|
sa.Column('instance_name', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('process_id', sa.String(length=100), nullable=True),
|
||||||
|
sa.Column('status', sa.String(length=20), nullable=False, default="starting"),
|
||||||
|
sa.Column('start_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('last_heartbeat', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('stop_time', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('restart_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('memory_usage_mb', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('cpu_usage_percent', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('health_status', sa.String(length=20), nullable=True, default="unknown"),
|
||||||
|
sa.Column('health_message', sa.Text(), nullable=True),
|
||||||
|
sa.Column('last_health_check', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('error_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.ForeignKeyConstraint(['configuration_id'], ['plugin_configurations.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['plugin_id'], ['plugins.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index('idx_plugin_instance_status', 'plugin_instances', ['plugin_id', 'status'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugin_instances_status'), 'plugin_instances', ['status'], unique=False)
|
||||||
|
|
||||||
|
# Create plugin_audit_logs table (based on actual model - UUID)
|
||||||
|
op.create_table('plugin_audit_logs',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('plugin_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('instance_id', postgresql.UUID(as_uuid=True), nullable=True),
|
||||||
|
sa.Column('event_type', sa.String(length=50), nullable=False),
|
||||||
|
sa.Column('action', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('resource', sa.String(length=200), nullable=True),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('api_key_id', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('ip_address', sa.String(length=45), nullable=True),
|
||||||
|
sa.Column('user_agent', sa.String(length=500), nullable=True),
|
||||||
|
sa.Column('request_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('response_status', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('response_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('duration_ms', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('success', sa.Boolean(), nullable=False),
|
||||||
|
sa.Column('error_message', sa.Text(), nullable=True),
|
||||||
|
sa.Column('timestamp', sa.DateTime(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['api_key_id'], ['api_keys.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['instance_id'], ['plugin_instances.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['plugin_id'], ['plugins.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index('idx_plugin_audit_event_type', 'plugin_audit_logs', ['event_type', 'timestamp'], unique=False)
|
||||||
|
op.create_index('idx_plugin_audit_plugin_time', 'plugin_audit_logs', ['plugin_id', 'timestamp'], unique=False)
|
||||||
|
op.create_index('idx_plugin_audit_user_time', 'plugin_audit_logs', ['user_id', 'timestamp'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugin_audit_logs_event_type'), 'plugin_audit_logs', ['event_type'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugin_audit_logs_success'), 'plugin_audit_logs', ['success'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugin_audit_logs_timestamp'), 'plugin_audit_logs', ['timestamp'], unique=False)
|
||||||
|
|
||||||
|
# Create plugin_cron_jobs table (based on actual model - UUID)
|
||||||
|
op.create_table('plugin_cron_jobs',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('plugin_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('job_name', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('job_id', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('schedule', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('timezone', sa.String(length=50), nullable=True, default="UTC"),
|
||||||
|
sa.Column('enabled', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('description', sa.Text(), nullable=True),
|
||||||
|
sa.Column('function_name', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('job_data', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('last_run_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('next_run_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('last_duration_ms', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('run_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('success_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('error_count', sa.Integer(), nullable=True, default=0),
|
||||||
|
sa.Column('last_error', sa.Text(), nullable=True),
|
||||||
|
sa.Column('last_error_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('max_retries', sa.Integer(), nullable=True, default=3),
|
||||||
|
sa.Column('retry_delay_seconds', sa.Integer(), nullable=True, default=60),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('created_by_user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['created_by_user_id'], ['users.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['plugin_id'], ['plugins.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index('idx_plugin_cron_next_run', 'plugin_cron_jobs', ['enabled', 'next_run_at'], unique=False)
|
||||||
|
op.create_index('idx_plugin_cron_plugin', 'plugin_cron_jobs', ['plugin_id', 'enabled'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugin_cron_jobs_job_id'), 'plugin_cron_jobs', ['job_id'], unique=True)
|
||||||
|
op.create_index(op.f('ix_plugin_cron_jobs_enabled'), 'plugin_cron_jobs', ['enabled'], unique=False)
|
||||||
|
op.create_index(op.f('ix_plugin_cron_jobs_next_run_at'), 'plugin_cron_jobs', ['next_run_at'], unique=False)
|
||||||
|
|
||||||
|
# Create plugin_api_gateways table (based on actual model - UUID)
|
||||||
|
op.create_table('plugin_api_gateways',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('plugin_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('base_path', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('internal_url', sa.String(length=500), nullable=False),
|
||||||
|
sa.Column('require_authentication', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('allowed_methods', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('rate_limit_per_minute', sa.Integer(), nullable=True, default=60),
|
||||||
|
sa.Column('rate_limit_per_hour', sa.Integer(), nullable=True, default=1000),
|
||||||
|
sa.Column('cors_enabled', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('cors_origins', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('cors_methods', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('cors_headers', sa.JSON(), nullable=True),
|
||||||
|
sa.Column('circuit_breaker_enabled', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('failure_threshold', sa.Integer(), nullable=True, default=5),
|
||||||
|
sa.Column('recovery_timeout_seconds', sa.Integer(), nullable=True, default=60),
|
||||||
|
sa.Column('enabled', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('last_health_check', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('health_status', sa.String(length=20), nullable=True, default="unknown"),
|
||||||
|
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('updated_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['plugin_id'], ['plugins.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_plugin_api_gateways_enabled'), 'plugin_api_gateways', ['enabled'], unique=False)
|
||||||
|
op.create_unique_constraint(None, 'plugin_api_gateways', ['base_path'])
|
||||||
|
op.create_unique_constraint(None, 'plugin_api_gateways', ['plugin_id'])
|
||||||
|
|
||||||
|
# Create plugin_permissions table (based on actual model - UUID)
|
||||||
|
op.create_table('plugin_permissions',
|
||||||
|
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('plugin_id', postgresql.UUID(as_uuid=True), nullable=False),
|
||||||
|
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('permission_name', sa.String(length=200), nullable=False),
|
||||||
|
sa.Column('granted', sa.Boolean(), nullable=False, default=True),
|
||||||
|
sa.Column('granted_at', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('granted_by_user_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('revoked_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.Column('revoked_by_user_id', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('reason', sa.Text(), nullable=True),
|
||||||
|
sa.Column('expires_at', sa.DateTime(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['granted_by_user_id'], ['users.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['plugin_id'], ['plugins.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['revoked_by_user_id'], ['users.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index('idx_plugin_permission_active', 'plugin_permissions', ['plugin_id', 'user_id', 'granted'], unique=False)
|
||||||
|
op.create_index('idx_plugin_permission_plugin_name', 'plugin_permissions', ['plugin_id', 'permission_name'], unique=False)
|
||||||
|
op.create_index('idx_plugin_permission_user_plugin', 'plugin_permissions', ['user_id', 'plugin_id'], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop every table created by upgrade(), most-dependent first, then the enum."""
    # Tables are removed in reverse dependency order so foreign-key
    # constraints never block a drop.
    tables_in_drop_order = (
        # Plugin system tables first (most dependent)
        'plugin_permissions',
        'plugin_api_gateways',
        'plugin_cron_jobs',
        'plugin_audit_logs',
        'plugin_instances',
        'plugin_configurations',
        'plugins',
        # Workflow system
        'workflow_step_logs',
        'workflow_executions',
        'workflow_definitions',
        # Modules
        'modules',
        # Prompt system
        'prompt_variables',
        'prompt_templates',
        # Chatbot system
        'chatbot_analytics',
        'chatbot_messages',
        'chatbot_conversations',
        'chatbot_instances',
        # RAG system
        'rag_documents',
        'rag_collections',
        # Audit system
        'audit_logs',
        # Usage and budget system
        'usage_tracking',
        'budgets',
        # API keys
        'api_keys',
        # Users (base table)
        'users',
    )
    for table_name in tables_in_drop_order:
        op.drop_table(table_name)

    # Finally remove the enum type created in upgrade()
    op.execute('DROP TYPE workflowstatus')
591
backend/app/api/v1/plugin_registry.py
Normal file
591
backend/app/api/v1/plugin_registry.py
Normal file
@@ -0,0 +1,591 @@
|
|||||||
|
"""
|
||||||
|
Plugin Registry API Endpoints
|
||||||
|
Provides REST API for plugin management, discovery, and installation
|
||||||
|
"""
|
||||||
|
from typing import List, Dict, Any, Optional
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, BackgroundTasks
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from app.db.database import get_db
|
||||||
|
from app.core.security import get_current_user
|
||||||
|
from app.models.user import User
|
||||||
|
from app.services.plugin_registry import plugin_installer, plugin_discovery
|
||||||
|
from app.services.plugin_sandbox import plugin_loader
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
|
||||||
|
|
||||||
|
logger = get_logger("plugin.registry.api")
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
# Pydantic models for request/response
|
||||||
|
class PluginSearchRequest(BaseModel):
    """Search filters accepted by the plugin discovery endpoint."""

    query: str = ""
    tags: Optional[List[str]] = None
    category: Optional[str] = None
    limit: int = 20
|
class PluginInstallRequest(BaseModel):
    """Payload for installing a plugin from the repository."""

    plugin_id: str
    version: str
    source: str = "repository"  # "repository" or "file"
|
class PluginUninstallRequest(BaseModel):
    """Payload for uninstalling a plugin; keep_data preserves stored plugin data."""

    keep_data: bool = True
|
# Discovery endpoints
|
||||||
|
@router.get("/discover")
|
||||||
|
async def discover_plugins(
|
||||||
|
query: str = "",
|
||||||
|
tags: str = "",
|
||||||
|
category: str = "",
|
||||||
|
limit: int = 20,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Discover available plugins from repository"""
|
||||||
|
try:
|
||||||
|
tag_list = [tag.strip() for tag in tags.split(",") if tag.strip()] if tags else None
|
||||||
|
|
||||||
|
plugins = await plugin_discovery.search_available_plugins(
|
||||||
|
query=query,
|
||||||
|
tags=tag_list,
|
||||||
|
category=category if category else None,
|
||||||
|
limit=limit,
|
||||||
|
db=db
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"plugins": plugins,
|
||||||
|
"count": len(plugins),
|
||||||
|
"query": query,
|
||||||
|
"filters": {
|
||||||
|
"tags": tag_list,
|
||||||
|
"category": category
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin discovery failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Discovery failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/categories")
|
||||||
|
async def get_plugin_categories(current_user: Dict[str, Any] = Depends(get_current_user)):
|
||||||
|
"""Get available plugin categories"""
|
||||||
|
try:
|
||||||
|
categories = await plugin_discovery.get_plugin_categories()
|
||||||
|
return {"categories": categories}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get categories: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to get categories: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/installed")
|
||||||
|
async def get_installed_plugins(
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get user's installed plugins"""
|
||||||
|
try:
|
||||||
|
plugins = await plugin_discovery.get_installed_plugins(current_user["id"], db)
|
||||||
|
return {
|
||||||
|
"plugins": plugins,
|
||||||
|
"count": len(plugins)
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get installed plugins: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to get installed plugins: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/updates")
|
||||||
|
async def check_plugin_updates(
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Check for available plugin updates"""
|
||||||
|
try:
|
||||||
|
updates = await plugin_discovery.get_plugin_updates(db)
|
||||||
|
return {
|
||||||
|
"updates": updates,
|
||||||
|
"count": len(updates)
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to check updates: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to check updates: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# Installation endpoints
|
||||||
|
@router.post("/install")
|
||||||
|
async def install_plugin(
|
||||||
|
request: PluginInstallRequest,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Install plugin from repository"""
|
||||||
|
try:
|
||||||
|
if request.source != "repository":
|
||||||
|
raise HTTPException(status_code=400, detail="Only repository installation supported via this endpoint")
|
||||||
|
|
||||||
|
# Start installation in background
|
||||||
|
background_tasks.add_task(
|
||||||
|
install_plugin_background,
|
||||||
|
request.plugin_id,
|
||||||
|
request.version,
|
||||||
|
current_user["id"],
|
||||||
|
db
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "installation_started",
|
||||||
|
"plugin_id": request.plugin_id,
|
||||||
|
"version": request.version,
|
||||||
|
"message": "Plugin installation started in background"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin installation failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Installation failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/install/upload")
|
||||||
|
async def install_plugin_from_file(
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
background_tasks: BackgroundTasks = None,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Install plugin from uploaded file"""
|
||||||
|
try:
|
||||||
|
# Validate file type
|
||||||
|
if not file.filename.endswith('.zip'):
|
||||||
|
raise HTTPException(status_code=400, detail="Only ZIP files are supported")
|
||||||
|
|
||||||
|
# Save uploaded file
|
||||||
|
import tempfile
|
||||||
|
with tempfile.NamedTemporaryFile(delete=False, suffix='.zip') as temp_file:
|
||||||
|
content = await file.read()
|
||||||
|
temp_file.write(content)
|
||||||
|
temp_file_path = temp_file.name
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Install plugin
|
||||||
|
result = await plugin_installer.install_plugin_from_file(
|
||||||
|
temp_file_path, current_user["id"], db
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "installed",
|
||||||
|
"result": result,
|
||||||
|
"message": "Plugin installed successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Cleanup temp file
|
||||||
|
import os
|
||||||
|
os.unlink(temp_file_path)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"File upload installation failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Installation failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{plugin_id}")
|
||||||
|
async def uninstall_plugin(
|
||||||
|
plugin_id: str,
|
||||||
|
request: PluginUninstallRequest,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Uninstall plugin"""
|
||||||
|
try:
|
||||||
|
result = await plugin_installer.uninstall_plugin(
|
||||||
|
plugin_id, current_user["id"], db, request.keep_data
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "uninstalled",
|
||||||
|
"result": result,
|
||||||
|
"message": "Plugin uninstalled successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin uninstall failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Uninstall failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# Plugin management endpoints
|
||||||
|
@router.post("/{plugin_id}/enable")
|
||||||
|
async def enable_plugin(
|
||||||
|
plugin_id: str,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Enable plugin"""
|
||||||
|
try:
|
||||||
|
from app.models.plugin import Plugin
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
stmt = select(Plugin).where(Plugin.id == plugin_id)
|
||||||
|
result = await db.execute(stmt)
|
||||||
|
plugin = result.scalar_one_or_none()
|
||||||
|
if not plugin:
|
||||||
|
raise HTTPException(status_code=404, detail="Plugin not found")
|
||||||
|
|
||||||
|
plugin.status = "enabled"
|
||||||
|
await db.commit()
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "enabled",
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"message": "Plugin enabled successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin enable failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Enable failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{plugin_id}/disable")
|
||||||
|
async def disable_plugin(
|
||||||
|
plugin_id: str,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Disable plugin"""
|
||||||
|
try:
|
||||||
|
from app.models.plugin import Plugin
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
stmt = select(Plugin).where(Plugin.id == plugin_id)
|
||||||
|
result = await db.execute(stmt)
|
||||||
|
plugin = result.scalar_one_or_none()
|
||||||
|
if not plugin:
|
||||||
|
raise HTTPException(status_code=404, detail="Plugin not found")
|
||||||
|
|
||||||
|
# Unload if currently loaded
|
||||||
|
if plugin_id in plugin_loader.loaded_plugins:
|
||||||
|
await plugin_loader.unload_plugin(plugin_id)
|
||||||
|
|
||||||
|
plugin.status = "disabled"
|
||||||
|
await db.commit()
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "disabled",
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"message": "Plugin disabled successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin disable failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Disable failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{plugin_id}/load")
|
||||||
|
async def load_plugin(
|
||||||
|
plugin_id: str,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Load plugin into runtime"""
|
||||||
|
try:
|
||||||
|
from app.models.plugin import Plugin
|
||||||
|
from pathlib import Path
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
stmt = select(Plugin).where(Plugin.id == plugin_id)
|
||||||
|
result = await db.execute(stmt)
|
||||||
|
plugin = result.scalar_one_or_none()
|
||||||
|
if not plugin:
|
||||||
|
raise HTTPException(status_code=404, detail="Plugin not found")
|
||||||
|
|
||||||
|
if plugin.status != "enabled":
|
||||||
|
raise HTTPException(status_code=400, detail="Plugin must be enabled to load")
|
||||||
|
|
||||||
|
if plugin_id in plugin_loader.loaded_plugins:
|
||||||
|
raise HTTPException(status_code=400, detail="Plugin already loaded")
|
||||||
|
|
||||||
|
# Load plugin
|
||||||
|
plugin_dir = Path(plugin.plugin_dir)
|
||||||
|
plugin_token = "temp_token" # TODO: Generate proper plugin tokens
|
||||||
|
|
||||||
|
await plugin_loader.load_plugin_with_sandbox(plugin_dir, plugin_token)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "loaded",
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"message": "Plugin loaded successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin load failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Load failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{plugin_id}/unload")
|
||||||
|
async def unload_plugin(
|
||||||
|
plugin_id: str,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user)
|
||||||
|
):
|
||||||
|
"""Unload plugin from runtime"""
|
||||||
|
try:
|
||||||
|
if plugin_id not in plugin_loader.loaded_plugins:
|
||||||
|
raise HTTPException(status_code=404, detail="Plugin not loaded")
|
||||||
|
|
||||||
|
success = await plugin_loader.unload_plugin(plugin_id)
|
||||||
|
if not success:
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to unload plugin")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "unloaded",
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"message": "Plugin unloaded successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plugin unload failed: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Unload failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# Configuration endpoints
|
||||||
|
@router.get("/{plugin_id}/config")
|
||||||
|
async def get_plugin_configuration(
|
||||||
|
plugin_id: str,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get plugin configuration for user with automatic decryption"""
|
||||||
|
try:
|
||||||
|
from app.services.plugin_configuration_manager import plugin_config_manager
|
||||||
|
|
||||||
|
# Use the new configuration manager to get decrypted configuration
|
||||||
|
config_data = await plugin_config_manager.get_plugin_configuration(
|
||||||
|
plugin_id=plugin_id,
|
||||||
|
user_id=current_user["id"],
|
||||||
|
db=db,
|
||||||
|
decrypt_sensitive=False # Don't decrypt sensitive data for API response
|
||||||
|
)
|
||||||
|
|
||||||
|
if config_data is not None:
|
||||||
|
return {
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"configuration": config_data,
|
||||||
|
"has_configuration": True
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
# Get default configuration from manifest
|
||||||
|
resolved_config = await plugin_config_manager.get_resolved_configuration(
|
||||||
|
plugin_id=plugin_id,
|
||||||
|
user_id=current_user["id"],
|
||||||
|
db=db
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"configuration": resolved_config,
|
||||||
|
"has_configuration": False
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get plugin configuration: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to get configuration: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{plugin_id}/config")
|
||||||
|
async def save_plugin_configuration(
|
||||||
|
plugin_id: str,
|
||||||
|
config_request: Dict[str, Any],
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Save plugin configuration for user with automatic encryption of sensitive fields"""
|
||||||
|
try:
|
||||||
|
from app.services.plugin_configuration_manager import plugin_config_manager
|
||||||
|
|
||||||
|
# Extract configuration data and metadata
|
||||||
|
config_data = config_request.get("configuration", {})
|
||||||
|
config_name = config_request.get("name", "Default Configuration")
|
||||||
|
config_description = config_request.get("description")
|
||||||
|
|
||||||
|
# Use the new configuration manager to save with automatic encryption
|
||||||
|
saved_config = await plugin_config_manager.save_plugin_configuration(
|
||||||
|
plugin_id=plugin_id,
|
||||||
|
user_id=current_user["id"],
|
||||||
|
config_data=config_data,
|
||||||
|
config_name=config_name,
|
||||||
|
config_description=config_description,
|
||||||
|
db=db
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "saved",
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"configuration_id": str(saved_config.id),
|
||||||
|
"message": "Configuration saved successfully with automatic encryption of sensitive fields"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to save plugin configuration: {e}")
|
||||||
|
await db.rollback()
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to save configuration: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{plugin_id}/schema")
|
||||||
|
async def get_plugin_configuration_schema(
|
||||||
|
plugin_id: str,
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get plugin configuration schema from manifest"""
|
||||||
|
try:
|
||||||
|
from app.services.plugin_configuration_manager import plugin_config_manager
|
||||||
|
|
||||||
|
# Use the new configuration manager to get schema
|
||||||
|
schema = await plugin_config_manager.get_plugin_configuration_schema(plugin_id, db)
|
||||||
|
|
||||||
|
if not schema:
|
||||||
|
raise HTTPException(status_code=404, detail=f"No configuration schema available for plugin '{plugin_id}'")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"schema": schema
|
||||||
|
}
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get plugin schema: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to get schema: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{plugin_id}/test-credentials")
|
||||||
|
async def test_plugin_credentials(
|
||||||
|
plugin_id: str,
|
||||||
|
test_request: Dict[str, Any],
|
||||||
|
current_user: Dict[str, Any] = Depends(get_current_user),
|
||||||
|
db: AsyncSession = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Test plugin credentials (currently supports Zammad)"""
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.info(f"Testing credentials for plugin {plugin_id}")
|
||||||
|
|
||||||
|
# Get plugin from database to check its name
|
||||||
|
from app.models.plugin import Plugin
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
stmt = select(Plugin).where(Plugin.id == plugin_id)
|
||||||
|
result = await db.execute(stmt)
|
||||||
|
plugin = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if not plugin:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Plugin '{plugin_id}' not found")
|
||||||
|
|
||||||
|
# Check if this is a Zammad plugin
|
||||||
|
if plugin.name.lower() != 'zammad':
|
||||||
|
raise HTTPException(status_code=400, detail=f"Credential testing not supported for plugin '{plugin.name}'")
|
||||||
|
|
||||||
|
# Extract credentials from request
|
||||||
|
zammad_url = test_request.get('zammad_url')
|
||||||
|
api_token = test_request.get('api_token')
|
||||||
|
|
||||||
|
if not zammad_url or not api_token:
|
||||||
|
raise HTTPException(status_code=400, detail="Both zammad_url and api_token are required")
|
||||||
|
|
||||||
|
# Clean up the URL (remove trailing slash)
|
||||||
|
zammad_url = zammad_url.rstrip('/')
|
||||||
|
|
||||||
|
# Test credentials by making a read-only API call to Zammad
|
||||||
|
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||||
|
# Try to get user info - this is a safe read-only operation
|
||||||
|
test_url = f"{zammad_url}/api/v1/users/me"
|
||||||
|
headers = {
|
||||||
|
'Authorization': f'Token token={api_token}',
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await client.get(test_url, headers=headers)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
# Success - credentials are valid
|
||||||
|
user_data = response.json()
|
||||||
|
user_email = user_data.get('email', 'unknown')
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"message": f"Credentials verified! Connected as: {user_email}",
|
||||||
|
"zammad_url": zammad_url,
|
||||||
|
"user_info": {
|
||||||
|
"email": user_email,
|
||||||
|
"firstname": user_data.get('firstname', ''),
|
||||||
|
"lastname": user_data.get('lastname', '')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
elif response.status_code == 401:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "Invalid API token. Please check your token and try again.",
|
||||||
|
"error_code": "invalid_token"
|
||||||
|
}
|
||||||
|
elif response.status_code == 404:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "Zammad URL not found. Please verify the URL is correct.",
|
||||||
|
"error_code": "invalid_url"
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
error_text = ""
|
||||||
|
try:
|
||||||
|
error_data = response.json()
|
||||||
|
error_text = error_data.get('error', error_data.get('message', ''))
|
||||||
|
except:
|
||||||
|
error_text = response.text[:200]
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": f"Connection failed (HTTP {response.status_code}): {error_text}",
|
||||||
|
"error_code": "connection_failed"
|
||||||
|
}
|
||||||
|
|
||||||
|
except httpx.TimeoutException:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "Connection timeout. Please check the Zammad URL and your network connection.",
|
||||||
|
"error_code": "timeout"
|
||||||
|
}
|
||||||
|
except httpx.ConnectError:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "Could not connect to Zammad. Please verify the URL is correct and accessible.",
|
||||||
|
"error_code": "connection_error"
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to test plugin credentials: {e}")
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": f"Test failed: {str(e)}",
|
||||||
|
"error_code": "unknown_error"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Background task for plugin installation
async def install_plugin_background(plugin_id: str, version: str, user_id: str, db: AsyncSession):
    """Background task for plugin installation.

    Delegates to the module-level plugin_installer; outcomes are logged only,
    since background tasks cannot return a response to the caller.
    """
    try:
        outcome = await plugin_installer.install_plugin_from_repository(
            plugin_id, version, user_id, db
        )
        logger.info(f"Background installation completed: {outcome}")
    except Exception as e:
        logger.error(f"Background installation failed: {e}")
        # TODO: Notify user of installation failure
|
||||||
338
backend/app/models/plugin.py
Normal file
338
backend/app/models/plugin.py
Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
"""
|
||||||
|
Plugin System Database Models
|
||||||
|
Defines the database schema for the isolated plugin architecture
|
||||||
|
"""
|
||||||
|
from sqlalchemy import Column, Integer, String, Text, DateTime, Boolean, JSON, ForeignKey, Index
|
||||||
|
from sqlalchemy.dialects.postgresql import UUID
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from app.db.database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin(Base):
    """Plugin registry - tracks all installed plugins.

    One row per installed plugin package. Configuration instances, runtime
    instances, audit logs and cron jobs hang off this row via relationships.
    """
    __tablename__ = "plugins"

    # Primary identification
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name = Column(String(100), unique=True, nullable=False, index=True)
    slug = Column(String(100), unique=True, nullable=False, index=True)  # URL-safe identifier

    # Metadata
    display_name = Column(String(200), nullable=False)
    description = Column(Text)
    version = Column(String(50), nullable=False)
    author = Column(String(200))
    homepage = Column(String(500))
    repository = Column(String(500))

    # Plugin file information (hashes allow integrity checks on the package)
    package_path = Column(String(500), nullable=False)  # Path to plugin package
    manifest_hash = Column(String(64), nullable=False)  # SHA256 of manifest file
    package_hash = Column(String(64), nullable=False)  # SHA256 of plugin package

    # Status and lifecycle
    status = Column(String(20), nullable=False, default="installed", index=True)
    # Statuses: installing, installed, enabled, disabled, error, uninstalling
    enabled = Column(Boolean, default=False, nullable=False, index=True)
    auto_enable = Column(Boolean, default=False, nullable=False)

    # Installation tracking
    # NOTE(review): func.now() yields database server time; whether this is
    # UTC depends on the server/session timezone configuration — confirm.
    installed_at = Column(DateTime, nullable=False, default=func.now())
    enabled_at = Column(DateTime)
    last_updated_at = Column(DateTime, default=func.now(), onupdate=func.now())
    installed_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

    # Configuration and requirements
    manifest_data = Column(JSON)  # Complete plugin.yaml content
    config_schema = Column(JSON)  # JSON schema for plugin configuration
    default_config = Column(JSON)  # Default configuration values

    # Security and permissions
    required_permissions = Column(JSON)  # List of required permission scopes
    api_scopes = Column(JSON)  # Required API access scopes
    resource_limits = Column(JSON)  # Memory, CPU, storage limits

    # Database isolation (each plugin gets its own database/schema)
    database_name = Column(String(100), unique=True)  # Isolated database name
    database_url = Column(String(1000))  # Connection string for plugin database

    # Error tracking
    last_error = Column(Text)
    error_count = Column(Integer, default=0)
    last_error_at = Column(DateTime)

    # Relationships; cascades delete dependent rows with the plugin row
    installed_by_user = relationship("User", back_populates="installed_plugins")
    configurations = relationship("PluginConfiguration", back_populates="plugin", cascade="all, delete-orphan")
    instances = relationship("PluginInstance", back_populates="plugin", cascade="all, delete-orphan")
    audit_logs = relationship("PluginAuditLog", back_populates="plugin", cascade="all, delete-orphan")
    cron_jobs = relationship("PluginCronJob", back_populates="plugin", cascade="all, delete-orphan")

    # Indexes for performance
    __table_args__ = (
        Index('idx_plugin_status_enabled', 'status', 'enabled'),
        Index('idx_plugin_user_status', 'installed_by_user_id', 'status'),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class PluginConfiguration(Base):
    """Plugin configuration instances - per user/environment configs.

    A plugin can have multiple named configurations per user; sensitive values
    are kept separately in encrypted_data rather than in config_data.
    """
    __tablename__ = "plugin_configurations"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    plugin_id = Column(UUID(as_uuid=True), ForeignKey("plugins.id"), nullable=False)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

    # Configuration data
    name = Column(String(200), nullable=False)  # Human-readable config name
    description = Column(Text)
    config_data = Column(JSON, nullable=False)  # Non-sensitive configuration values
    encrypted_data = Column(Text)  # Encrypted sensitive fields (JSON string)
    schema_version = Column(String(50))  # Schema version for migration support
    is_active = Column(Boolean, default=False, nullable=False)
    is_default = Column(Boolean, default=False, nullable=False)

    # Metadata
    created_at = Column(DateTime, nullable=False, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())
    created_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

    # Relationships. Two FKs point at users.id, so each User relationship
    # must name its own foreign-key column explicitly.
    plugin = relationship("Plugin", back_populates="configurations")
    user = relationship("User", foreign_keys=[user_id])
    created_by_user = relationship("User", foreign_keys=[created_by_user_id])

    # Constraints
    __table_args__ = (
        Index('idx_plugin_config_user_active', 'plugin_id', 'user_id', 'is_active'),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class PluginInstance(Base):
    """Plugin runtime instances - tracks running plugin processes.

    One row per launched process/container of a plugin; heartbeats and health
    fields are updated by the runtime while the instance is alive.
    """
    __tablename__ = "plugin_instances"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    plugin_id = Column(UUID(as_uuid=True), ForeignKey("plugins.id"), nullable=False)
    configuration_id = Column(UUID(as_uuid=True), ForeignKey("plugin_configurations.id"))

    # Runtime information
    instance_name = Column(String(200), nullable=False)
    process_id = Column(String(100))  # Docker container ID or process ID
    status = Column(String(20), nullable=False, default="starting", index=True)
    # Statuses: starting, running, stopping, stopped, error, crashed

    # Performance tracking
    start_time = Column(DateTime, nullable=False, default=func.now())
    last_heartbeat = Column(DateTime, default=func.now())
    stop_time = Column(DateTime)
    restart_count = Column(Integer, default=0)

    # Resource usage
    # NOTE(review): both stored as Integer — fractional values are truncated
    # by whatever writes them; confirm that whole-number precision suffices.
    memory_usage_mb = Column(Integer)
    cpu_usage_percent = Column(Integer)

    # Health monitoring
    health_status = Column(String(20), default="unknown")  # healthy, warning, critical
    health_message = Column(Text)
    last_health_check = Column(DateTime)

    # Error tracking
    last_error = Column(Text)
    error_count = Column(Integer, default=0)

    # Relationships
    plugin = relationship("Plugin", back_populates="instances")
    configuration = relationship("PluginConfiguration")

    __table_args__ = (
        Index('idx_plugin_instance_status', 'plugin_id', 'status'),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class PluginAuditLog(Base):
    """Audit logging for all plugin activities.

    Append-only event log: one row per API call, configuration change, error,
    etc. Request/response payloads are expected to be sanitized before insert.
    """
    __tablename__ = "plugin_audit_logs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    plugin_id = Column(UUID(as_uuid=True), ForeignKey("plugins.id"), nullable=False)
    instance_id = Column(UUID(as_uuid=True), ForeignKey("plugin_instances.id"))

    # Event details
    event_type = Column(String(50), nullable=False, index=True)  # api_call, config_change, error, etc.
    action = Column(String(100), nullable=False)
    resource = Column(String(200))  # Resource being accessed

    # Context information (all optional: events may be system-initiated)
    user_id = Column(Integer, ForeignKey("users.id"))
    api_key_id = Column(Integer, ForeignKey("api_keys.id"))
    ip_address = Column(String(45))  # IPv4 or IPv6 (45 chars fits the longest IPv6 form)
    user_agent = Column(String(500))

    # Request/response data
    request_data = Column(JSON)  # Sanitized request data
    response_status = Column(Integer)
    response_data = Column(JSON)  # Sanitized response data

    # Performance metrics
    duration_ms = Column(Integer)

    # Status and errors
    success = Column(Boolean, nullable=False, index=True)
    error_message = Column(Text)

    # Timestamps
    timestamp = Column(DateTime, nullable=False, default=func.now(), index=True)

    # Relationships
    plugin = relationship("Plugin", back_populates="audit_logs")
    instance = relationship("PluginInstance")
    user = relationship("User")
    api_key = relationship("APIKey")

    # Composite indexes matching the common query patterns
    __table_args__ = (
        Index('idx_plugin_audit_plugin_time', 'plugin_id', 'timestamp'),
        Index('idx_plugin_audit_user_time', 'user_id', 'timestamp'),
        Index('idx_plugin_audit_event_type', 'event_type', 'timestamp'),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class PluginCronJob(Base):
    """Plugin scheduled jobs and cron tasks.

    Persistent record of each scheduled job a plugin registers; job_id is the
    unique key used by the external scheduler.

    NOTE(review): a Pydantic model with the same name exists in
    app.schemas.plugin_manifest — different module, but easy to confuse.
    """
    __tablename__ = "plugin_cron_jobs"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    plugin_id = Column(UUID(as_uuid=True), ForeignKey("plugins.id"), nullable=False)

    # Job identification
    job_name = Column(String(200), nullable=False)
    job_id = Column(String(100), nullable=False, unique=True, index=True)  # Unique scheduler ID

    # Schedule configuration
    schedule = Column(String(100), nullable=False)  # Cron expression
    timezone = Column(String(50), default="UTC")
    enabled = Column(Boolean, default=True, nullable=False, index=True)

    # Job details
    description = Column(Text)
    function_name = Column(String(200), nullable=False)  # Plugin function to call
    job_data = Column(JSON)  # Parameters for the job function

    # Execution tracking
    last_run_at = Column(DateTime)
    next_run_at = Column(DateTime, index=True)
    last_duration_ms = Column(Integer)
    run_count = Column(Integer, default=0)
    success_count = Column(Integer, default=0)
    error_count = Column(Integer, default=0)

    # Error handling
    last_error = Column(Text)
    last_error_at = Column(DateTime)
    max_retries = Column(Integer, default=3)
    retry_delay_seconds = Column(Integer, default=60)

    # Lifecycle
    created_at = Column(DateTime, nullable=False, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())
    created_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

    # Relationships
    plugin = relationship("Plugin", back_populates="cron_jobs")
    created_by_user = relationship("User")

    # Index ordering supports the scheduler's "enabled jobs due next" scan
    __table_args__ = (
        Index('idx_plugin_cron_next_run', 'enabled', 'next_run_at'),
        Index('idx_plugin_cron_plugin', 'plugin_id', 'enabled'),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class PluginAPIGateway(Base):
    """API gateway configuration for plugin routing.

    One row per plugin (plugin_id is unique): maps a public base_path to the
    plugin's internal service URL and carries security, CORS, rate-limit and
    circuit-breaker settings for that route.
    """
    __tablename__ = "plugin_api_gateways"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    plugin_id = Column(UUID(as_uuid=True), ForeignKey("plugins.id"), nullable=False, unique=True)

    # API routing configuration
    base_path = Column(String(200), nullable=False, unique=True)  # /api/v1/plugins/zammad
    internal_url = Column(String(500), nullable=False)  # http://plugin-zammad:8000

    # Security settings
    # NOTE(review): the list defaults below are shared Python objects used for
    # every insert — mutating a loaded row's list in place would not affect
    # them, but mutating the default object itself would; verify callers
    # never do that.
    require_authentication = Column(Boolean, default=True, nullable=False)
    allowed_methods = Column(JSON, default=["GET", "POST", "PUT", "DELETE"])  # HTTP methods
    rate_limit_per_minute = Column(Integer, default=60)
    rate_limit_per_hour = Column(Integer, default=1000)

    # CORS settings
    cors_enabled = Column(Boolean, default=True, nullable=False)
    cors_origins = Column(JSON, default=["*"])
    cors_methods = Column(JSON, default=["GET", "POST", "PUT", "DELETE", "OPTIONS"])
    cors_headers = Column(JSON, default=["*"])

    # Circuit breaker settings
    circuit_breaker_enabled = Column(Boolean, default=True, nullable=False)
    failure_threshold = Column(Integer, default=5)
    recovery_timeout_seconds = Column(Integer, default=60)

    # Monitoring
    enabled = Column(Boolean, default=True, nullable=False, index=True)
    last_health_check = Column(DateTime)
    health_status = Column(String(20), default="unknown")  # healthy, unhealthy, timeout

    # Timestamps
    created_at = Column(DateTime, nullable=False, default=func.now())
    updated_at = Column(DateTime, default=func.now(), onupdate=func.now())

    # Relationships
    plugin = relationship("Plugin")
|
||||||
|
|
||||||
|
|
||||||
|
# Add relationships to existing User model (import this in user.py)
|
||||||
|
"""
|
||||||
|
Add to User model:
|
||||||
|
installed_plugins = relationship("Plugin", back_populates="installed_by_user")
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Add relationships to existing APIKey model (import this in api_key.py)
|
||||||
|
"""
|
||||||
|
Add to APIKey model:
|
||||||
|
plugin_audit_logs = relationship("PluginAuditLog", back_populates="api_key")
|
||||||
|
"""
|
||||||
|
|
||||||
|
class PluginPermission(Base):
    """Plugin permission grants - tracks user permissions for plugins.

    Rows record both grants and revocations: ``granted`` flips to False on
    revocation and the revoked_* columns capture who/when.
    """
    __tablename__ = "plugin_permissions"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    plugin_id = Column(UUID(as_uuid=True), ForeignKey("plugins.id"), nullable=False)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)

    # Permission details
    permission_name = Column(String(200), nullable=False)  # e.g., 'chatbot:invoke', 'rag:query'
    granted = Column(Boolean, default=True, nullable=False)  # True=granted, False=revoked

    # Grant/revoke tracking
    granted_at = Column(DateTime, nullable=False, default=func.now())
    granted_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    revoked_at = Column(DateTime)
    revoked_by_user_id = Column(Integer, ForeignKey("users.id"))

    # Metadata
    reason = Column(Text)  # Reason for grant/revocation
    expires_at = Column(DateTime)  # Optional expiration time

    # Relationships. Three FKs point at users.id, so each User relationship
    # names its own foreign-key column.
    plugin = relationship("Plugin")
    user = relationship("User", foreign_keys=[user_id])
    granted_by_user = relationship("User", foreign_keys=[granted_by_user_id])
    revoked_by_user = relationship("User", foreign_keys=[revoked_by_user_id])

    __table_args__ = (
        Index('idx_plugin_permission_user_plugin', 'user_id', 'plugin_id'),
        Index('idx_plugin_permission_plugin_name', 'plugin_id', 'permission_name'),
        Index('idx_plugin_permission_active', 'plugin_id', 'user_id', 'granted'),
    )
|
||||||
436
backend/app/schemas/plugin_manifest.py
Normal file
436
backend/app/schemas/plugin_manifest.py
Normal file
@@ -0,0 +1,436 @@
|
|||||||
|
"""
|
||||||
|
Plugin Manifest Schema and Validation
|
||||||
|
Defines the structure and validation for plugin manifest files
|
||||||
|
"""
|
||||||
|
from typing import List, Dict, Any, Optional, Union
|
||||||
|
from pydantic import BaseModel, Field, validator, HttpUrl
|
||||||
|
from enum import Enum
|
||||||
|
import yaml
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
class PluginRuntimeSpec(BaseModel):
    """Plugin runtime requirements and dependencies.

    Declares the Python version, package dependencies and environment
    variables a plugin needs at runtime.
    """
    python_version: str = Field("3.11", description="Required Python version")
    dependencies: List[str] = Field(default_factory=list, description="Required Python packages")
    environment_variables: Dict[str, str] = Field(default_factory=dict, description="Required environment variables")

    @validator('python_version')
    def validate_python_version(cls, v):
        # FIX: the previous startswith(('3.9', '3.10', '3.11', '3.12')) check
        # wrongly accepted versions such as "3.90" or "3.110". Compare the
        # major.minor prefix exactly instead; a patch suffix ("3.11.4") is
        # still allowed.
        major_minor = '.'.join(v.split('.')[:2])
        if major_minor not in ('3.9', '3.10', '3.11', '3.12'):
            raise ValueError('Python version must be 3.9, 3.10, 3.11, or 3.12')
        return v
|
||||||
|
|
||||||
|
|
||||||
|
class PluginPermissions(BaseModel):
    """Plugin permission specifications"""
    platform_apis: List[str] = Field(default_factory=list, description="Platform API access scopes")
    plugin_scopes: List[str] = Field(default_factory=list, description="Plugin-specific permission scopes")
    external_domains: List[str] = Field(default_factory=list, description="Allowed external domains")

    @validator('platform_apis')
    def validate_platform_apis(cls, v):
        # Known platform API scopes; a trailing ":*" wildcard is also accepted.
        allowed_apis = {
            'chatbot:invoke', 'chatbot:manage', 'chatbot:read',
            'rag:query', 'rag:manage', 'rag:read',
            'llm:completion', 'llm:embeddings', 'llm:models',
            'workflow:execute', 'workflow:read',
            'cache:read', 'cache:write',
        }
        for scope in v:
            if scope.endswith(':*') or scope in allowed_apis:
                continue
            raise ValueError(f'Invalid platform API scope: {scope}')
        return v
|
||||||
|
|
||||||
|
|
||||||
|
class PluginDatabaseSpec(BaseModel):
    """Plugin database configuration"""
    # NOTE(review): the field name "schema" shadows BaseModel.schema();
    # pydantic may warn about this — consider an alias. Kept as-is to
    # preserve the manifest format.
    schema: str = Field(..., description="Database schema name")
    migrations_path: str = Field("./migrations", description="Path to migration files")
    auto_migrate: bool = Field(True, description="Auto-run migrations on startup")

    @validator('schema')
    def validate_schema_name(cls, v):
        # Schemas must be namespaced under "plugin_" and otherwise contain
        # only alphanumerics and underscores.
        if not v.startswith('plugin_'):
            raise ValueError('Database schema must start with "plugin_"')
        remainder = v.replace('plugin_', '').replace('_', '')
        if not remainder.isalnum():
            raise ValueError('Schema name must contain only alphanumeric characters and underscores')
        return v
|
||||||
|
|
||||||
|
|
||||||
|
class PluginAPIEndpoint(BaseModel):
    """Plugin API endpoint specification"""
    path: str = Field(..., description="API endpoint path")
    methods: List[str] = Field(default=['GET'], description="Allowed HTTP methods")
    description: str = Field("", description="Endpoint description")
    auth_required: bool = Field(True, description="Whether authentication is required")

    @validator('methods')
    def validate_methods(cls, value):
        permitted = {'GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'}
        for verb in value:
            if verb not in permitted:
                raise ValueError(f'Invalid HTTP method: {verb}')
        return value

    @validator('path')
    def validate_path(cls, value):
        # Endpoint paths are mounted under the plugin's base route, so they
        # must be absolute.
        if not value.startswith('/'):
            raise ValueError('API path must start with "/"')
        return value
|
||||||
|
|
||||||
|
|
||||||
|
class PluginCronJob(BaseModel):
    """Plugin scheduled job specification"""
    name: str = Field(..., description="Job name")
    schedule: str = Field(..., description="Cron expression")
    function: str = Field(..., description="Function to execute")
    description: str = Field("", description="Job description")
    enabled: bool = Field(True, description="Whether job is enabled by default")
    timeout_seconds: int = Field(300, description="Job timeout in seconds")
    max_retries: int = Field(3, description="Maximum retry attempts")

    @validator('schedule')
    def validate_cron_expression(cls, value):
        # Only the field count is checked here; validating the content of
        # each cron field is left to the scheduler.
        if len(value.split()) != 5:
            raise ValueError('Cron expression must have 5 parts (minute hour day month weekday)')
        return value
|
||||||
|
|
||||||
|
|
||||||
|
class PluginUIConfig(BaseModel):
    """Plugin UI configuration"""
    configuration_schema: str = Field("./config_schema.json", description="JSON schema for configuration")
    ui_components: str = Field("./ui/components", description="Path to UI components")
    pages: List[Dict[str, str]] = Field(default_factory=list, description="Plugin pages")

    @validator('pages')
    def validate_pages(cls, value):
        # Every page entry must declare these three keys.
        for entry in value:
            for required in ('name', 'path', 'component'):
                if required not in entry:
                    raise ValueError(f'Page must have {required} field')
        return value
|
||||||
|
|
||||||
|
|
||||||
|
class PluginExternalServices(BaseModel):
    """Plugin external service configuration.

    Declares which external domains the plugin may contact, any webhooks it
    registers, and per-domain rate limits. Field-type validation only; no
    custom validators.
    """
    allowed_domains: List[str] = Field(default_factory=list, description="Allowed external domains")
    webhooks: List[Dict[str, str]] = Field(default_factory=list, description="Webhook configurations")
    rate_limits: Dict[str, int] = Field(default_factory=dict, description="Rate limits per domain")
|
||||||
|
|
||||||
|
|
||||||
|
class PluginMetadata(BaseModel):
    """Plugin metadata information"""
    name: str = Field(..., description="Plugin name (must be unique)")
    version: str = Field(..., description="Plugin version (semantic versioning)")
    description: str = Field(..., description="Plugin description")
    author: str = Field(..., description="Plugin author")
    license: str = Field("MIT", description="Plugin license")
    homepage: Optional[HttpUrl] = Field(None, description="Plugin homepage URL")
    repository: Optional[HttpUrl] = Field(None, description="Plugin repository URL")
    tags: List[str] = Field(default_factory=list, description="Plugin tags for discovery")

    @validator('name')
    def validate_name(cls, value):
        # Strip the allowed separators first; whatever remains must be
        # purely alphanumeric. Names are normalized to lowercase.
        stripped = value.replace('-', '').replace('_', '')
        if not stripped.isalnum():
            raise ValueError('Plugin name must contain only alphanumeric characters, hyphens, and underscores')
        if not 3 <= len(value) <= 50:
            raise ValueError('Plugin name must be between 3 and 50 characters')
        return value.lower()

    @validator('version')
    def validate_version(cls, value):
        # Basic semantic versioning validation: exactly three numeric parts.
        pieces = value.split('.')
        if len(pieces) != 3:
            raise ValueError('Version must follow semantic versioning (x.y.z)')
        if not all(piece.isdigit() for piece in pieces):
            raise ValueError('Version parts must be numeric')
        return value
|
||||||
|
|
||||||
|
|
||||||
|
class PluginManifest(BaseModel):
    """Complete plugin manifest specification"""
    apiVersion: str = Field("v1", description="Manifest API version")
    kind: str = Field("Plugin", description="Resource kind")
    metadata: PluginMetadata = Field(..., description="Plugin metadata")
    # Forward reference: PluginSpec is defined later in this module and
    # resolved via model_rebuild().
    spec: "PluginSpec" = Field(..., description="Plugin specification")

    @validator('apiVersion')
    def validate_api_version(cls, value):
        # Only one manifest revision exists today.
        supported = ('v1',)
        if value not in supported:
            raise ValueError('Unsupported API version')
        return value

    @validator('kind')
    def validate_kind(cls, value):
        if value != 'Plugin':
            raise ValueError('Kind must be "Plugin"')
        return value
|
||||||
|
|
||||||
|
|
||||||
|
class PluginSpec(BaseModel):
    """Plugin specification details.

    Aggregates all per-plugin declarations from the manifest; every section
    is optional and falls back to an empty/default value.
    """
    runtime: PluginRuntimeSpec = Field(default_factory=PluginRuntimeSpec, description="Runtime requirements")
    permissions: PluginPermissions = Field(default_factory=PluginPermissions, description="Permission requirements")
    database: Optional[PluginDatabaseSpec] = Field(None, description="Database configuration")
    api_endpoints: List[PluginAPIEndpoint] = Field(default_factory=list, description="API endpoints")
    cron_jobs: List[PluginCronJob] = Field(default_factory=list, description="Scheduled jobs")
    ui_config: Optional[PluginUIConfig] = Field(None, description="UI configuration")
    external_services: Optional[PluginExternalServices] = Field(None, description="External service configuration")
    config_schema: Dict[str, Any] = Field(default_factory=dict, description="Plugin configuration JSON schema")
|
||||||
|
|
||||||
|
|
||||||
|
# Resolve the "PluginSpec" forward reference used by PluginManifest.spec
# (PluginSpec is declared after PluginManifest above).
# NOTE(review): model_rebuild() is the pydantic v2 API while the validators
# in this module use the v1-style @validator decorator — confirm the
# installed pydantic version supports both.
PluginManifest.model_rebuild()
|
||||||
|
|
||||||
|
|
||||||
|
class PluginManifestValidator:
    """Plugin manifest validation and parsing utilities.

    Loads plugin manifests from YAML, checks the on-disk plugin layout for
    required files, and reports platform compatibility.
    """

    # Files every plugin package must ship.
    REQUIRED_FILES = [
        'manifest.yaml',
        'main.py',
        'requirements.txt'
    ]

    # Files/directories a plugin may additionally ship.
    OPTIONAL_FILES = [
        'config_schema.json',
        'README.md',
        'ui/components',
        'migrations',
        'tests'
    ]

    @classmethod
    def load_from_file(cls, manifest_path: Union[str, Path]) -> PluginManifest:
        """Load and validate plugin manifest from YAML file.

        Raises:
            FileNotFoundError: manifest (or a required plugin file) is missing.
            ValueError: YAML is malformed or does not match the manifest schema.
        """
        manifest_path = Path(manifest_path)

        if not manifest_path.exists():
            raise FileNotFoundError(f"Manifest file not found: {manifest_path}")

        try:
            with open(manifest_path, 'r', encoding='utf-8') as f:
                manifest_data = yaml.safe_load(f)
        except yaml.YAMLError as e:
            raise ValueError(f"Invalid YAML in manifest file: {e}")

        try:
            manifest = PluginManifest(**manifest_data)
        except Exception as e:
            raise ValueError(f"Invalid manifest structure: {e}")

        # Structural checks against the plugin directory itself.
        cls._validate_plugin_structure(manifest_path.parent, manifest)

        return manifest

    @classmethod
    def _validate_plugin_structure(cls, plugin_dir: Path, manifest: PluginManifest):
        """Validate plugin directory structure and required files."""

        # All required files must be present.
        for required_file in cls.REQUIRED_FILES:
            file_path = plugin_dir / required_file
            if not file_path.exists():
                raise FileNotFoundError(f"Required file missing: {required_file}")

        # main.py must at least reference a BasePlugin subclass (cheap
        # textual check; the loader does the real class discovery later).
        main_py_path = plugin_dir / 'main.py'
        with open(main_py_path, 'r', encoding='utf-8') as f:
            main_content = f.read()

        if 'BasePlugin' not in main_content:
            raise ValueError("main.py must contain a class inheriting from BasePlugin")

        # requirements.txt only needs to be readable: blank lines and '#'
        # comments are valid pip syntax, so (unlike the previous
        # implementation, which raised on any blank line) they are accepted.
        requirements_path = plugin_dir / 'requirements.txt'
        with open(requirements_path, 'r', encoding='utf-8') as f:
            f.read()

        # Validate config schema if specified.
        if manifest.spec.ui_config and manifest.spec.ui_config.configuration_schema:
            schema_path = plugin_dir / manifest.spec.ui_config.configuration_schema
            if schema_path.exists():
                import json
                try:
                    with open(schema_path, 'r', encoding='utf-8') as f:
                        json.load(f)
                except json.JSONDecodeError as e:
                    raise ValueError(f"Invalid JSON schema: {e}")

        # Validate migrations layout if a database is specified.
        if manifest.spec.database:
            migrations_path = plugin_dir / manifest.spec.database.migrations_path
            if migrations_path.exists() and not migrations_path.is_dir():
                raise ValueError("Migrations path must be a directory")

    @classmethod
    def validate_plugin_compatibility(cls, manifest: PluginManifest) -> Dict[str, Any]:
        """Validate plugin compatibility with platform.

        Returns a report dict with ``compatible`` (bool), ``warnings``,
        ``errors`` and ``platform_version``.
        """
        compatibility_report = {
            "compatible": True,
            "warnings": [],
            "errors": [],
            "platform_version": "1.0.0"
        }

        # Every requested platform API permission must be known.
        unsupported_apis = [
            api for api in manifest.spec.permissions.platform_apis
            if not cls._is_platform_api_supported(api)
        ]
        if unsupported_apis:
            compatibility_report["errors"].append(
                f"Unsupported platform APIs: {', '.join(unsupported_apis)}"
            )
            compatibility_report["compatible"] = False

        # The requested Python runtime must be one the platform provides.
        required_version = manifest.spec.runtime.python_version
        if not cls._is_python_version_supported(required_version):
            compatibility_report["errors"].append(
                f"Unsupported Python version: {required_version}"
            )
            compatibility_report["compatible"] = False

        # Dependency conflicts are soft failures: warnings only.
        for dependency in manifest.spec.runtime.dependencies:
            if cls._is_dependency_conflicting(dependency):
                compatibility_report["warnings"].append(
                    f"Potential dependency conflict: {dependency}"
                )

        return compatibility_report

    @classmethod
    def _is_platform_api_supported(cls, api: str) -> bool:
        """Check if a platform API permission string is supported."""
        supported_apis = [
            'chatbot:invoke', 'chatbot:manage', 'chatbot:read',
            'rag:query', 'rag:manage', 'rag:read',
            'llm:completion', 'llm:embeddings', 'llm:models',
            'workflow:execute', 'workflow:read',
            'cache:read', 'cache:write'
        ]

        # Support wildcard permissions such as "rag:*".
        if api.endswith(':*'):
            base_api = api[:-2]
            return any(supported.startswith(base_api + ':') for supported in supported_apis)

        return api in supported_apis

    @classmethod
    def _is_python_version_supported(cls, version: str) -> bool:
        """Check if the requested Python version is supported."""
        supported_versions = ['3.9', '3.10', '3.11', '3.12']
        return any(version.startswith(v) for v in supported_versions)

    @classmethod
    def _is_dependency_conflicting(cls, dependency: str) -> bool:
        """Check if a requirement line might conflict with the platform.

        Only the distribution name matters: version specifiers, extras and
        environment markers are stripped first.
        """
        # Strip every PEP 508 suffix; the previous split chain missed
        # '~=', '!=', extras '[...]' and markers ';'.
        package_name = dependency
        for separator in ('==', '>=', '<=', '~=', '!=', '>', '<', '[', ';'):
            package_name = package_name.split(separator)[0]
        package_name = package_name.strip()

        # Packages the platform pins itself; a plugin pinning them too can
        # break the shared environment.
        conflicting_packages = [
            'sqlalchemy',  # Platform uses specific version
            'fastapi',     # Platform uses specific version
            'pydantic',    # Platform uses specific version
            'alembic'      # Platform migration system
        ]

        return package_name.lower() in conflicting_packages

    @classmethod
    def generate_manifest_hash(cls, manifest: PluginManifest) -> str:
        """Generate a SHA-256 hash of the canonicalised manifest content."""
        # Pydantic v2 renamed .dict() to .model_dump(); support both so
        # the validator works (without deprecation warnings) on v2.
        if hasattr(manifest, 'model_dump'):
            manifest_dict = manifest.model_dump()
        else:
            manifest_dict = manifest.dict()
        manifest_str = yaml.dump(manifest_dict, sort_keys=True, default_flow_style=False)
        return hashlib.sha256(manifest_str.encode('utf-8')).hexdigest()

    @classmethod
    def create_example_manifest(cls, plugin_name: str) -> PluginManifest:
        """Create an example plugin manifest for development."""
        return PluginManifest(
            metadata=PluginMetadata(
                name=plugin_name,
                version="1.0.0",
                description=f"Example {plugin_name} plugin for Enclava platform",
                author="Enclava Team",
                license="MIT",
                tags=["integration", "example"]
            ),
            spec=PluginSpec(
                runtime=PluginRuntimeSpec(
                    python_version="3.11",
                    dependencies=[
                        "aiohttp>=3.8.0",
                        "pydantic>=2.0.0"
                    ]
                ),
                permissions=PluginPermissions(
                    platform_apis=["chatbot:invoke", "rag:query"],
                    plugin_scopes=["read", "write"]
                ),
                database=PluginDatabaseSpec(
                    schema=f"plugin_{plugin_name}",
                    migrations_path="./migrations"
                ),
                api_endpoints=[
                    PluginAPIEndpoint(
                        path="/status",
                        methods=["GET"],
                        description="Plugin health status"
                    )
                ],
                ui_config=PluginUIConfig(
                    configuration_schema="./config_schema.json",
                    pages=[
                        {
                            "name": "dashboard",
                            "path": f"/plugins/{plugin_name}",
                            "component": f"{plugin_name.title()}Dashboard"
                        }
                    ]
                )
            )
        )
|
||||||
|
|
||||||
|
|
||||||
|
def validate_manifest_file(manifest_path: Union[str, Path]) -> Dict[str, Any]:
    """Validate a plugin manifest file and return validation results.

    Never raises: any failure is reported through the ``valid`` flag and
    the ``errors`` list of the returned dict.
    """
    result = {
        "valid": False,
        "manifest": None,
        "compatibility": None,
        "hash": None,
        "errors": [],
    }
    try:
        loaded = PluginManifestValidator.load_from_file(manifest_path)
        result["manifest"] = loaded
        result["compatibility"] = PluginManifestValidator.validate_plugin_compatibility(loaded)
        result["hash"] = PluginManifestValidator.generate_manifest_hash(loaded)
        result["valid"] = True
    except Exception as exc:
        result["errors"] = [str(exc)]
    return result
|
||||||
603
backend/app/services/base_plugin.py
Normal file
603
backend/app/services/base_plugin.py
Normal file
@@ -0,0 +1,603 @@
|
|||||||
|
"""
|
||||||
|
Base Plugin Class and Plugin Runtime Environment
|
||||||
|
Provides the foundation for all Enclava plugins with security and isolation
|
||||||
|
"""
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Dict, Any, List, Optional, Tuple
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from fastapi import APIRouter, Request, HTTPException, Depends
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
import importlib.util
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from app.schemas.plugin_manifest import PluginManifest, PluginManifestValidator
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.utils.exceptions import SecurityError, ValidationError
|
||||||
|
from app.models.plugin import PluginConfiguration
|
||||||
|
from app.models.user import User
|
||||||
|
from app.db.database import get_db
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PluginContext:
    """Plugin execution context with user and authentication info.

    Populated by the plugin API gateway from request headers; every field
    defaults to None when the corresponding header is absent.
    """
    user_id: Optional[str] = None
    api_key_id: Optional[str] = None
    # Fixed annotation: the default is None (permissions unknown), so the
    # field is Optional, not a bare List[str].
    user_permissions: Optional[List[str]] = None
    ip_address: Optional[str] = None
    user_agent: Optional[str] = None
    request_id: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class PlatformAPIClient:
    """Secure client for plugins to access platform APIs.

    Every request is authenticated with the plugin's bearer token and
    tagged with the plugin identity so the platform can scope permissions.
    """

    def __init__(self, plugin_id: str, plugin_token: str):
        self.plugin_id = plugin_id
        self.plugin_token = plugin_token
        self.base_url = settings.INTERNAL_API_URL or "http://localhost:58000"
        self.logger = get_logger(f"plugin.{plugin_id}.api_client")

    async def _make_request(self, method: str, endpoint: str, **kwargs) -> Dict[str, Any]:
        """Make an authenticated request to the platform API."""
        # Attach plugin identity/auth headers on top of any caller headers.
        auth_headers = kwargs.setdefault('headers', {})
        auth_headers['Authorization'] = f'Bearer {self.plugin_token}'
        auth_headers['X-Plugin-ID'] = self.plugin_id
        auth_headers['X-Platform-Client'] = 'plugin'
        auth_headers['Content-Type'] = 'application/json'

        url = f"{self.base_url}{endpoint}"

        try:
            async with aiohttp.ClientSession() as session:
                async with session.request(method, url, **kwargs) as response:
                    if response.status >= 400:
                        error_text = await response.text()
                        raise HTTPException(
                            status_code=response.status,
                            detail=f"Platform API error: {error_text}"
                        )
                    # JSON responses are decoded; anything else is wrapped.
                    if response.content_type == 'application/json':
                        return await response.json()
                    return {"data": await response.text()}
        except aiohttp.ClientError as e:
            self.logger.error(f"Platform API client error: {e}")
            raise HTTPException(
                status_code=503,
                detail=f"Platform API unavailable: {str(e)}"
            )

    async def get(self, endpoint: str, **kwargs) -> Dict[str, Any]:
        """GET request to platform API."""
        return await self._make_request('GET', endpoint, **kwargs)

    async def post(self, endpoint: str, data: Dict[str, Any] = None, **kwargs) -> Dict[str, Any]:
        """POST request to platform API."""
        if data:
            kwargs['json'] = data
        return await self._make_request('POST', endpoint, **kwargs)

    async def put(self, endpoint: str, data: Dict[str, Any] = None, **kwargs) -> Dict[str, Any]:
        """PUT request to platform API."""
        if data:
            kwargs['json'] = data
        return await self._make_request('PUT', endpoint, **kwargs)

    async def delete(self, endpoint: str, **kwargs) -> Dict[str, Any]:
        """DELETE request to platform API."""
        return await self._make_request('DELETE', endpoint, **kwargs)

    # --- Platform-specific convenience wrappers --------------------------

    async def call_chatbot_api(self, chatbot_id: str, message: str,
                               context: Dict[str, Any] = None) -> Dict[str, Any]:
        """Consume platform chatbot API."""
        payload = {"message": message, "context": context or {}}
        return await self.post(f"/api/v1/chatbot/external/{chatbot_id}/chat", payload)

    async def call_llm_api(self, model: str, messages: List[Dict[str, Any]],
                           **kwargs) -> Dict[str, Any]:
        """Consume platform LLM chat-completions API."""
        payload = {"model": model, "messages": messages, **kwargs}
        return await self.post("/api/v1/llm/chat/completions", payload)

    async def search_rag(self, collection: str, query: str,
                         top_k: int = 5) -> Dict[str, Any]:
        """Consume platform RAG search API."""
        payload = {"query": query, "top_k": top_k}
        return await self.post(f"/api/v1/rag/collections/{collection}/search", payload)

    async def get_embeddings(self, model: str, input_text: str) -> Dict[str, Any]:
        """Generate embeddings via platform API."""
        payload = {"model": model, "input": input_text}
        return await self.post("/api/v1/llm/embeddings", payload)
|
||||||
|
|
||||||
|
|
||||||
|
class PluginConfigManager:
    """Manages plugin configuration with validation and encryption."""

    def __init__(self, plugin_id: str):
        self.plugin_id = plugin_id
        self.logger = get_logger(f"plugin.{plugin_id}.config")

    async def get_config(self, user_id: Optional[str] = None) -> Dict[str, Any]:
        """Get plugin configuration for user (or default).

        Returns an empty dict when no configuration exists or on error.
        """
        try:
            from app.db.database import SessionLocal
            session = SessionLocal()
            try:
                # Only active configurations for this plugin are considered.
                lookup = session.query(PluginConfiguration).filter(
                    PluginConfiguration.plugin_id == self.plugin_id,
                    PluginConfiguration.is_active == True
                )
                # Scope to the user, or fall back to the default config.
                if user_id:
                    lookup = lookup.filter(PluginConfiguration.user_id == user_id)
                else:
                    lookup = lookup.filter(PluginConfiguration.is_default == True)

                record = lookup.first()
                if record is None:
                    self.logger.debug(f"No configuration found for plugin {self.plugin_id}, user {user_id}")
                    return {}

                self.logger.debug(f"Retrieved configuration for plugin {self.plugin_id}, user {user_id}")
                return record.config_data or {}
            finally:
                session.close()
        except Exception as e:
            self.logger.error(f"Failed to get configuration: {e}")
            return {}

    async def save_config(self, config: Dict[str, Any], user_id: str,
                          name: str = "Default Configuration",
                          description: str = None) -> bool:
        """Save plugin configuration for user. Returns True on success."""
        try:
            from app.db.database import SessionLocal
            session = SessionLocal()
            try:
                # Update in place when the same named config already exists.
                record = session.query(PluginConfiguration).filter(
                    PluginConfiguration.plugin_id == self.plugin_id,
                    PluginConfiguration.user_id == user_id,
                    PluginConfiguration.name == name
                ).first()

                if record is not None:
                    record.config_data = config
                    record.description = description
                    record.is_active = True
                    self.logger.info(f"Updated configuration for plugin {self.plugin_id}, user {user_id}")
                else:
                    fresh = PluginConfiguration(
                        plugin_id=self.plugin_id,
                        user_id=user_id,
                        name=name,
                        description=description,
                        config_data=config,
                        is_active=True,
                        is_default=(name == "Default Configuration"),
                        created_by_user_id=user_id
                    )
                    # The very first configuration for this user/plugin pair
                    # always becomes the default.
                    prior = session.query(PluginConfiguration).filter(
                        PluginConfiguration.plugin_id == self.plugin_id,
                        PluginConfiguration.user_id == user_id
                    ).count()
                    if prior == 0:
                        fresh.is_default = True
                    session.add(fresh)
                    self.logger.info(f"Created new configuration for plugin {self.plugin_id}, user {user_id}")

                session.commit()
                return True
            except Exception as e:
                session.rollback()
                self.logger.error(f"Database error saving configuration: {e}")
                return False
            finally:
                session.close()
        except Exception as e:
            self.logger.error(f"Failed to save configuration: {e}")
            return False

    async def validate_config(self, config: Dict[str, Any],
                              schema: Dict[str, Any]) -> Tuple[bool, List[str]]:
        """Validate configuration against a JSON schema.

        Returns (ok, errors) rather than raising.
        """
        try:
            import jsonschema
            jsonschema.validate(config, schema)
        except jsonschema.ValidationError as e:
            return False, [str(e)]
        except Exception as e:
            return False, [f"Schema validation error: {str(e)}"]
        return True, []
|
||||||
|
|
||||||
|
|
||||||
|
class PluginLogger:
    """Plugin-specific logger with security filtering.

    Wraps the platform logger, prefixing every record with the plugin id
    and redacting credential-looking ``key=value`` / ``key: value`` pairs
    before they reach the log.
    """

    def __init__(self, plugin_id: str):
        self.plugin_id = plugin_id
        self.logger = get_logger(f"plugin.{plugin_id}")

        # Key names whose values must never appear in logs.
        self.sensitive_patterns = [
            r'password', r'token', r'key', r'secret', r'api_key',
            r'bearer', r'authorization', r'credential'
        ]

    def _filter_sensitive_data(self, message: str) -> str:
        """Redact values of sensitive keys (e.g. 'password=x') in *message*."""
        import re
        filtered_message = message
        for pattern in self.sensitive_patterns:
            # Raw-string regex suffix: the previous f-string literal wrote
            # '\s' inside a non-raw string, an invalid escape sequence.
            filtered_message = re.sub(
                pattern + r'''[=:]\s*["']?([^"'\s]+)["']?''',
                pattern + '=***REDACTED***',
                filtered_message,
                flags=re.IGNORECASE
            )
        return filtered_message

    def _emit(self, log_method, message: str, **kwargs):
        """Shared implementation for all levels: filter, prefix, emit."""
        log_method(f"[PLUGIN:{self.plugin_id}] {self._filter_sensitive_data(message)}", **kwargs)

    def info(self, message: str, **kwargs):
        """Log info message with sensitive data filtering."""
        self._emit(self.logger.info, message, **kwargs)

    def warning(self, message: str, **kwargs):
        """Log warning message with sensitive data filtering."""
        self._emit(self.logger.warning, message, **kwargs)

    def error(self, message: str, **kwargs):
        """Log error message with sensitive data filtering."""
        self._emit(self.logger.error, message, **kwargs)

    def debug(self, message: str, **kwargs):
        """Log debug message with sensitive data filtering."""
        self._emit(self.logger.debug, message, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class BasePlugin(ABC):
    """Base class for all Enclava plugins with security and isolation.

    Concrete plugins implement :meth:`get_api_router`, :meth:`initialize`
    and :meth:`cleanup`. This base wires up the platform API client,
    configuration manager and filtered logger, and tracks request/error
    counts for health reporting.
    """

    def __init__(self, manifest: PluginManifest, plugin_token: str):
        self.manifest = manifest
        self.plugin_id = manifest.metadata.name
        self.version = manifest.metadata.version

        # Per-plugin service facades.
        self.api_client = PlatformAPIClient(self.plugin_id, plugin_token)
        self.config = PluginConfigManager(self.plugin_id)
        self.logger = PluginLogger(self.plugin_id)

        # Plugin state / metrics.
        self.initialized = False
        self._startup_time = time.time()
        self._request_count = 0
        self._error_count = 0

        self.logger.info(f"Plugin {self.plugin_id} v{self.version} instantiated")

    @abstractmethod
    def get_api_router(self) -> APIRouter:
        """Return FastAPI router for plugin endpoints."""
        pass

    @abstractmethod
    async def initialize(self) -> bool:
        """Initialize plugin resources and connections."""
        pass

    @abstractmethod
    async def cleanup(self) -> bool:
        """Cleanup plugin resources on shutdown."""
        pass

    async def health_check(self) -> Dict[str, Any]:
        """Return plugin health status and simple request metrics."""
        uptime = time.time() - self._startup_time
        # max(..., 1) avoids division by zero before the first request.
        error_rate = self._error_count / max(self._request_count, 1)

        return {
            "status": "healthy" if error_rate < 0.1 else "warning",
            "plugin": self.plugin_id,
            "version": self.version,
            "uptime_seconds": round(uptime, 2),
            "request_count": self._request_count,
            "error_count": self._error_count,
            "error_rate": round(error_rate, 3),
            "initialized": self.initialized
        }

    async def get_configuration_schema(self) -> Dict[str, Any]:
        """Return JSON schema for plugin configuration."""
        return self.manifest.spec.config_schema

    async def execute_cron_job(self, job_name: str) -> bool:
        """Execute a scheduled cron job declared in the manifest.

        Returns True when the job ran and reported success.
        """
        self.logger.info(f"Executing cron job: {job_name}")

        # Look the job up in the manifest.
        job_spec = None
        for job in self.manifest.spec.cron_jobs:
            if job.name == job_name:
                job_spec = job
                break

        if not job_spec:
            self.logger.error(f"Cron job not found: {job_name}")
            return False

        try:
            # The manifest names a method on this plugin instance.
            if hasattr(self, job_spec.function):
                func = getattr(self, job_spec.function)
                if asyncio.iscoroutinefunction(func):
                    result = await func()
                else:
                    result = func()

                self.logger.info(f"Cron job {job_name} completed successfully")
                return bool(result)
            else:
                self.logger.error(f"Cron job function not found: {job_spec.function}")
                return False

        except Exception as e:
            self.logger.error(f"Cron job {job_name} failed: {e}")
            self._error_count += 1
            return False

    def get_auth_context(self) -> PluginContext:
        """Dependency to get authentication context in API endpoints."""
        async def _get_context(request: Request) -> PluginContext:
            # Headers are populated by the plugin API gateway.
            raw_permissions = request.headers.get('X-User-Permissions', '')
            return PluginContext(
                user_id=request.headers.get('X-User-ID'),
                api_key_id=request.headers.get('X-API-Key-ID'),
                # An absent/empty header means "no permissions" ([]), not
                # the [''] that a bare split(',') used to produce.
                user_permissions=raw_permissions.split(',') if raw_permissions else [],
                ip_address=request.headers.get('X-Real-IP'),
                user_agent=request.headers.get('User-Agent'),
                request_id=request.headers.get('X-Request-ID')
            )

        return Depends(_get_context)

    def _track_request(self, success: bool = True):
        """Track request metrics for health reporting."""
        self._request_count += 1
        if not success:
            self._error_count += 1
|
||||||
|
|
||||||
|
|
||||||
|
class PluginSecurityManager:
    """Manages plugin security and isolation."""

    # Import prefixes plugins must never use (platform internals, process
    # control, raw network/thread primitives). Matched with startswith.
    BLOCKED_IMPORTS = {
        # Core platform modules
        'app.db', 'app.models', 'app.core', 'app.services',
        'sqlalchemy', 'alembic',

        # Security sensitive
        'subprocess', 'eval', 'exec', 'compile', '__import__',
        'os.system', 'os.popen', 'os.spawn',

        # System access
        'socket', 'multiprocessing', 'threading'
    }

    # Explicitly allowed prefixes, checked BEFORE the block list so that
    # curated exceptions (the plugin framework modules) are reachable.
    ALLOWED_IMPORTS = {
        # Standard library
        'asyncio', 'aiohttp', 'json', 'datetime', 'typing', 'pydantic',
        'logging', 'time', 'uuid', 'hashlib', 'base64', 'pathlib',
        're', 'urllib.parse', 'dataclasses', 'enum',

        # Approved third-party
        'httpx', 'requests', 'pandas', 'numpy', 'yaml',

        # Plugin framework
        'app.services.base_plugin', 'app.schemas.plugin_manifest'
    }

    @classmethod
    def validate_plugin_import(cls, import_name: str) -> bool:
        """Validate if plugin can import a module.

        Raises SecurityError for blocked imports; returns True otherwise.
        """
        # Allow-list first. Previously the block list was checked first,
        # which made 'app.services.base_plugin' and
        # 'app.schemas.plugin_manifest' unreachable: both were rejected by
        # the blocked 'app.services' / platform prefixes.
        if any(import_name.startswith(allowed) for allowed in cls.ALLOWED_IMPORTS):
            return True

        # Block dangerous imports.
        if any(import_name.startswith(blocked) for blocked in cls.BLOCKED_IMPORTS):
            raise SecurityError(f"Import '{import_name}' not allowed in plugin environment")

        # Unknown imports are permitted but logged for audit.
        logger = get_logger("plugin.security")
        logger.warning(f"Potentially unsafe import in plugin: {import_name}")
        return True

    @classmethod
    def create_plugin_sandbox(cls, plugin_id: str) -> Dict[str, Any]:
        """Create isolated environment limits for plugin execution."""
        return {
            'max_memory_mb': 128,
            'max_cpu_percent': 25,
            'max_disk_mb': 100,
            'max_api_calls_per_minute': 100,
            'allowed_domains': [],  # Will be populated from manifest
            'network_timeout_seconds': 30
        }
|
||||||
|
|
||||||
|
|
||||||
|
class PluginLoader:
|
||||||
|
"""Loads and validates plugins from directories"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.logger = get_logger("plugin.loader")
|
||||||
|
self.loaded_plugins: Dict[str, BasePlugin] = {}
|
||||||
|
|
||||||
|
async def load_plugin(self, plugin_dir: Path, plugin_token: str) -> BasePlugin:
|
||||||
|
"""Load a plugin from a directory"""
|
||||||
|
self.logger.info(f"Loading plugin from: {plugin_dir}")
|
||||||
|
|
||||||
|
# Load and validate manifest
|
||||||
|
manifest_path = plugin_dir / "manifest.yaml"
|
||||||
|
validation_result = validate_manifest_file(manifest_path)
|
||||||
|
|
||||||
|
if not validation_result["valid"]:
|
||||||
|
raise ValidationError(f"Invalid plugin manifest: {validation_result['errors']}")
|
||||||
|
|
||||||
|
manifest = validation_result["manifest"]
|
||||||
|
|
||||||
|
# Check compatibility
|
||||||
|
compatibility = validation_result["compatibility"]
|
||||||
|
if not compatibility["compatible"]:
|
||||||
|
raise ValidationError(f"Plugin incompatible: {compatibility['errors']}")
|
||||||
|
|
||||||
|
# Load plugin module
|
||||||
|
main_py_path = plugin_dir / "main.py"
|
||||||
|
spec = importlib.util.spec_from_file_location(
|
||||||
|
f"plugin_{manifest.metadata.name}",
|
||||||
|
main_py_path
|
||||||
|
)
|
||||||
|
|
||||||
|
if not spec or not spec.loader:
|
||||||
|
raise ValidationError(f"Cannot load plugin module: {main_py_path}")
|
||||||
|
|
||||||
|
# Security check before loading
|
||||||
|
self._validate_plugin_security(main_py_path)
|
||||||
|
|
||||||
|
# Load module
|
||||||
|
plugin_module = importlib.util.module_from_spec(spec)
|
||||||
|
|
||||||
|
# Add to sys.modules to allow imports
|
||||||
|
sys.modules[spec.name] = plugin_module
|
||||||
|
|
||||||
|
try:
|
||||||
|
spec.loader.exec_module(plugin_module)
|
||||||
|
except Exception as e:
|
||||||
|
raise ValidationError(f"Failed to execute plugin module: {e}")
|
||||||
|
|
||||||
|
# Find plugin class
|
||||||
|
plugin_class = None
|
||||||
|
for attr_name in dir(plugin_module):
|
||||||
|
attr = getattr(plugin_module, attr_name)
|
||||||
|
if (isinstance(attr, type) and
|
||||||
|
issubclass(attr, BasePlugin) and
|
||||||
|
attr is not BasePlugin):
|
||||||
|
plugin_class = attr
|
||||||
|
break
|
||||||
|
|
||||||
|
if not plugin_class:
|
||||||
|
raise ValidationError("Plugin must contain a class inheriting from BasePlugin")
|
||||||
|
|
||||||
|
# Instantiate plugin
|
||||||
|
plugin_instance = plugin_class(manifest, plugin_token)
|
||||||
|
|
||||||
|
# Initialize plugin
|
||||||
|
try:
|
||||||
|
await plugin_instance.initialize()
|
||||||
|
plugin_instance.initialized = True
|
||||||
|
except Exception as e:
|
||||||
|
raise ValidationError(f"Plugin initialization failed: {e}")
|
||||||
|
|
||||||
|
self.loaded_plugins[manifest.metadata.name] = plugin_instance
|
||||||
|
self.logger.info(f"Plugin {manifest.metadata.name} loaded successfully")
|
||||||
|
|
||||||
|
return plugin_instance
|
||||||
|
|
||||||
|
def _validate_plugin_security(self, main_py_path: Path):
|
||||||
|
"""Validate plugin code for security issues"""
|
||||||
|
with open(main_py_path, 'r', encoding='utf-8') as f:
|
||||||
|
code_content = f.read()
|
||||||
|
|
||||||
|
# Check for dangerous patterns
|
||||||
|
dangerous_patterns = [
|
||||||
|
'eval(', 'exec(', 'compile(',
|
||||||
|
'subprocess.', 'os.system', 'os.popen',
|
||||||
|
'__import__', 'importlib.import_module',
|
||||||
|
'from app.db', 'from app.models',
|
||||||
|
'sqlalchemy', 'SessionLocal'
|
||||||
|
]
|
||||||
|
|
||||||
|
for pattern in dangerous_patterns:
|
||||||
|
if pattern in code_content:
|
||||||
|
raise SecurityError(f"Dangerous pattern detected in plugin code: {pattern}")
|
||||||
|
|
||||||
|
async def unload_plugin(self, plugin_id: str) -> bool:
|
||||||
|
"""Unload a plugin and cleanup resources"""
|
||||||
|
if plugin_id not in self.loaded_plugins:
|
||||||
|
return False
|
||||||
|
|
||||||
|
plugin = self.loaded_plugins[plugin_id]
|
||||||
|
|
||||||
|
try:
|
||||||
|
await plugin.cleanup()
|
||||||
|
del self.loaded_plugins[plugin_id]
|
||||||
|
self.logger.info(f"Plugin {plugin_id} unloaded successfully")
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error unloading plugin {plugin_id}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_plugin(self, plugin_id: str) -> Optional[BasePlugin]:
|
||||||
|
"""Get loaded plugin by ID"""
|
||||||
|
return self.loaded_plugins.get(plugin_id)
|
||||||
|
|
||||||
|
def list_loaded_plugins(self) -> List[str]:
    """Return the identifiers of every currently loaded plugin."""
    return [plugin_id for plugin_id in self.loaded_plugins]
|
||||||
21
backend/app/services/llm/__init__.py
Normal file
21
backend/app/services/llm/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
"""
|
||||||
|
LLM Service Package
|
||||||
|
|
||||||
|
Direct LLM integration without proxy dependencies.
|
||||||
|
Provides secure, efficient access to LLM providers with integrated security.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .service import LLMService
|
||||||
|
from .models import ChatRequest, ChatResponse, EmbeddingRequest, EmbeddingResponse
|
||||||
|
from .exceptions import LLMError, ProviderError, SecurityError
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"LLMService",
|
||||||
|
"ChatRequest",
|
||||||
|
"ChatResponse",
|
||||||
|
"EmbeddingRequest",
|
||||||
|
"EmbeddingResponse",
|
||||||
|
"LLMError",
|
||||||
|
"ProviderError",
|
||||||
|
"SecurityError"
|
||||||
|
]
|
||||||
285
backend/app/services/llm/config.py
Normal file
285
backend/app/services/llm/config.py
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
"""
|
||||||
|
LLM Service Configuration
|
||||||
|
|
||||||
|
Configuration management for LLM providers and service settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from typing import Dict, List, Optional, Any
|
||||||
|
from pydantic import BaseModel, Field, validator
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from .models import ResilienceConfig
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderConfig(BaseModel):
    """Configuration for an LLM provider.

    One instance per upstream provider (e.g. "privatemode"), covering
    connection details, capability flags, rate limits and resilience
    settings. The API key itself is never stored here — only the name of
    the environment variable that holds it (``api_key_env_var``).
    """
    name: str = Field(..., description="Provider name")
    enabled: bool = Field(True, description="Whether provider is enabled")
    base_url: str = Field(..., description="Provider base URL")
    api_key_env_var: str = Field(..., description="Environment variable for API key")
    default_model: Optional[str] = Field(None, description="Default model for this provider")
    supported_models: List[str] = Field(default_factory=list, description="List of supported models")
    capabilities: List[str] = Field(default_factory=list, description="Provider capabilities")
    priority: int = Field(1, description="Provider priority (lower = higher priority)")

    # Rate limiting (None means no limit enforced at this layer)
    max_requests_per_minute: Optional[int] = Field(None, description="Max requests per minute")
    max_requests_per_hour: Optional[int] = Field(None, description="Max requests per hour")

    # Model-specific settings
    supports_streaming: bool = Field(False, description="Whether provider supports streaming")
    supports_function_calling: bool = Field(False, description="Whether provider supports function calling")
    max_context_window: Optional[int] = Field(None, description="Maximum context window size")
    max_output_tokens: Optional[int] = Field(None, description="Maximum output tokens")

    # Resilience configuration (retries, timeouts, circuit breaker)
    resilience: ResilienceConfig = Field(default_factory=ResilienceConfig, description="Resilience settings")

    @validator('priority')
    def validate_priority(cls, v):
        # Priority is 1-based; lower numbers win when choosing a provider.
        if v < 1:
            raise ValueError("Priority must be >= 1")
        return v
|
||||||
|
|
||||||
|
|
||||||
|
class LLMServiceConfig(BaseModel):
    """Main LLM service configuration.

    Aggregates global behaviour flags, security thresholds, performance
    limits, per-provider configurations and the model -> provider routing
    table. Built once via ``create_default_config`` and managed by
    ``ConfigurationManager``.
    """

    # Global settings
    default_provider: str = Field("privatemode", description="Default provider to use")
    enable_detailed_logging: bool = Field(False, description="Enable detailed request/response logging")
    enable_security_checks: bool = Field(True, description="Enable security validation")
    enable_metrics_collection: bool = Field(True, description="Enable metrics collection")

    # Security settings
    security_risk_threshold: float = Field(0.8, ge=0.0, le=1.0, description="Risk threshold for blocking")
    security_warning_threshold: float = Field(0.6, ge=0.0, le=1.0, description="Risk threshold for warnings")
    max_prompt_length: int = Field(50000, ge=1000, description="Maximum prompt length")
    max_response_length: int = Field(32000, ge=1000, description="Maximum response length")

    # Performance settings
    default_timeout_ms: int = Field(30000, ge=1000, le=300000, description="Default request timeout")
    max_concurrent_requests: int = Field(100, ge=1, le=1000, description="Maximum concurrent requests")

    # Provider configurations
    providers: Dict[str, ProviderConfig] = Field(default_factory=dict, description="Provider configurations")

    # Model routing (model_name -> provider_name)
    model_routing: Dict[str, str] = Field(default_factory=dict, description="Model to provider routing")

    # BUG FIX: this cross-field check used to be attached to
    # `security_risk_threshold`. In pydantic v1, `values` only contains
    # fields declared *before* the one being validated, so
    # `security_warning_threshold` (declared later) was never present and
    # the check silently compared against the 0.6 default. Attaching the
    # validator to the later field makes the already-validated
    # `security_risk_threshold` available in `values`, so an inconsistent
    # pair (risk <= warning) is now reliably rejected.
    @validator('security_warning_threshold')
    def validate_risk_threshold(cls, v, values):
        risk_threshold = values.get('security_risk_threshold', 0.8)
        if risk_threshold <= v:
            raise ValueError("Risk threshold must be greater than warning threshold")
        return v
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_config() -> LLMServiceConfig:
    """Build the default LLM service configuration.

    Registers the PrivateMode.ai provider (reached through the local
    proxy) as the only — and therefore default — provider. Its supported
    model list and the model -> provider routing table are left empty;
    both are filled in at runtime from the proxy's /models endpoint.

    Returns:
        A fully-populated LLMServiceConfig instance.
    """
    # Resilience tuned for PrivateMode: generous timeout because TEE
    # attestation can make the proxy slower than a plain provider.
    privatemode_resilience = ResilienceConfig(
        max_retries=3,
        retry_delay_ms=1000,
        timeout_ms=60000,  # PrivateMode may be slower due to TEE
        circuit_breaker_threshold=5,
        circuit_breaker_reset_timeout_ms=120000,
    )

    privatemode = ProviderConfig(
        name="privatemode",
        enabled=True,
        base_url=settings.PRIVATEMODE_PROXY_URL,
        api_key_env_var="PRIVATEMODE_API_KEY",
        default_model="privatemode-latest",
        supported_models=[],  # populated dynamically from the proxy /models endpoint
        capabilities=["chat", "embeddings", "tee"],
        priority=1,
        max_requests_per_minute=100,
        max_requests_per_hour=2000,
        supports_streaming=True,
        supports_function_calling=True,
        max_context_window=128000,
        max_output_tokens=8192,
        resilience=privatemode_resilience,
    )

    # Global flags and thresholds come from application settings.
    return LLMServiceConfig(
        default_provider="privatemode",
        enable_detailed_logging=settings.LOG_LLM_PROMPTS,
        enable_security_checks=settings.API_SECURITY_ENABLED,
        security_risk_threshold=settings.API_SECURITY_RISK_THRESHOLD,
        security_warning_threshold=settings.API_SECURITY_WARNING_THRESHOLD,
        providers={"privatemode": privatemode},
        model_routing={},  # populated dynamically from provider models
    )
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class EnvironmentVariables:
    """Environment variables used by LLM service.

    Field defaults are placeholders only: ``__post_init__`` overwrites
    every field from the live process environment, so constructing an
    instance always reflects the environment at that moment.
    """

    # Encryption
    LLM_ENCRYPTION_KEY: Optional[str] = None

    # Provider API keys
    PRIVATEMODE_API_KEY: Optional[str] = None
    OPENAI_API_KEY: Optional[str] = None
    ANTHROPIC_API_KEY: Optional[str] = None
    GOOGLE_API_KEY: Optional[str] = None

    # Service settings
    LOG_LLM_PROMPTS: bool = False

    def __post_init__(self):
        """Populate every field from the current process environment."""
        for var_name in (
            "LLM_ENCRYPTION_KEY",
            "PRIVATEMODE_API_KEY",
            "OPENAI_API_KEY",
            "ANTHROPIC_API_KEY",
            "GOOGLE_API_KEY",
        ):
            setattr(self, var_name, os.getenv(var_name))
        # Truthy only for the literal (case-insensitive) string "true".
        self.LOG_LLM_PROMPTS = os.getenv("LOG_LLM_PROMPTS", "false").lower() == "true"

    def get_api_key(self, provider_name: str) -> Optional[str]:
        """Return the API key for *provider_name* (case-insensitive), or None."""
        provider = provider_name.lower()
        if provider == "privatemode":
            return self.PRIVATEMODE_API_KEY
        if provider == "openai":
            return self.OPENAI_API_KEY
        if provider == "anthropic":
            return self.ANTHROPIC_API_KEY
        if provider == "google":
            return self.GOOGLE_API_KEY
        return None

    def validate_required_keys(self, enabled_providers: List[str]) -> List[str]:
        """Return env-var names (``<PROVIDER>_API_KEY``) missing for *enabled_providers*."""
        return [
            f"{provider.upper()}_API_KEY"
            for provider in enabled_providers
            if not self.get_api_key(provider)
        ]
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationManager:
    """Manages LLM service configuration.

    Builds the default configuration lazily on first access, validates it,
    and exposes convenience lookups (provider config, model routing,
    API keys). A snapshot of the process environment is taken once at
    construction time via EnvironmentVariables.
    # NOTE(review): not obviously thread-safe — two concurrent first calls
    # to get_config() could each build a config; confirm startup is
    # single-threaded.
    """

    def __init__(self):
        # Built lazily by get_config(); stays None until first access.
        self._config: Optional[LLMServiceConfig] = None
        # Environment snapshot (API keys, flags) captured at construction.
        self._env_vars = EnvironmentVariables()

    def get_config(self) -> LLMServiceConfig:
        """Return the current configuration, building and validating the default on first call."""
        if self._config is None:
            self._config = create_default_config()
            self._validate_configuration()

        return self._config

    def update_config(self, config: LLMServiceConfig):
        """Replace the active configuration and re-run validation."""
        self._config = config
        self._validate_configuration()

    def get_provider_config(self, provider_name: str) -> Optional[ProviderConfig]:
        """Return the configuration for *provider_name*, or None if unknown."""
        config = self.get_config()
        return config.providers.get(provider_name)

    def get_provider_for_model(self, model_name: str) -> Optional[str]:
        """Return the provider name routed for *model_name*, or None if unrouted."""
        config = self.get_config()
        return config.model_routing.get(model_name)

    def get_enabled_providers(self) -> List[str]:
        """Return the names of all providers whose ``enabled`` flag is set."""
        config = self.get_config()
        return [name for name, provider in config.providers.items() if provider.enabled]

    def get_api_key(self, provider_name: str, encrypted: bool = False) -> Optional[str]:
        """Return the provider's API key from the environment snapshot.

        With ``encrypted=True`` the key is run through the security
        manager's encryption before being returned.
        """
        api_key = self._env_vars.get_api_key(provider_name)

        if api_key and encrypted:
            # Imported lazily to avoid a circular import at module load.
            from .security import security_manager
            return security_manager.encrypt_api_key(api_key)

        return api_key

    def _validate_configuration(self):
        """Sanity-check the active configuration; logs soft issues, raises on hard ones."""
        if not self._config:
            return

        # Check for enabled providers without API keys (soft warning only).
        enabled_providers = self.get_enabled_providers()
        missing_keys = self._env_vars.validate_required_keys(enabled_providers)

        if missing_keys:
            import logging
            logger = logging.getLogger(__name__)
            logger.warning(f"Missing API keys for enabled providers: {', '.join(missing_keys)}")

        # Validate default provider is enabled (hard error — nothing would serve requests).
        default_provider = self._config.default_provider
        if default_provider not in enabled_providers:
            raise ValueError(f"Default provider '{default_provider}' is not enabled")

        # Validate model routing points to enabled providers (soft warning only).
        invalid_routes = []
        for model, provider in self._config.model_routing.items():
            if provider not in enabled_providers:
                invalid_routes.append(f"{model} -> {provider}")

        if invalid_routes:
            import logging
            logger = logging.getLogger(__name__)
            logger.warning(f"Model routes point to disabled providers: {', '.join(invalid_routes)}")

    async def refresh_provider_models(self, provider_name: str, models: List[str]):
        """Replace a provider's supported model list and route those models to it.

        Silently does nothing if no config has been built yet or the
        provider is unknown. Existing routes for other models are kept.
        """
        if not self._config:
            return

        provider_config = self._config.providers.get(provider_name)
        if not provider_config:
            return

        # Update supported models
        provider_config.supported_models = models

        # Update model routing - map all models to this provider
        for model in models:
            self._config.model_routing[model] = provider_name

        import logging
        logger = logging.getLogger(__name__)
        logger.info(f"Updated {provider_name} with {len(models)} models: {models}")

    async def get_all_available_models(self) -> Dict[str, List[str]]:
        """Return supported models keyed by provider name, enabled providers only."""
        config = self.get_config()
        models_by_provider = {}

        for provider_name, provider_config in config.providers.items():
            if provider_config.enabled:
                models_by_provider[provider_name] = provider_config.supported_models

        return models_by_provider

    def get_model_provider_mapping(self) -> Dict[str, str]:
        """Return a copy of the model -> provider routing table."""
        config = self.get_config()
        return config.model_routing.copy()
|
||||||
|
|
||||||
|
|
||||||
|
# Global configuration manager
# Module-level singleton: import and share this instance rather than
# constructing ConfigurationManager directly.
config_manager = ConfigurationManager()
|
||||||
62
backend/app/services/llm/exceptions.py
Normal file
62
backend/app/services/llm/exceptions.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
"""
|
||||||
|
LLM Service Exceptions
|
||||||
|
|
||||||
|
Custom exceptions for LLM service operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class LLMError(Exception):
    """Base exception for all LLM service errors.

    Carries a machine-readable ``error_code`` and an optional ``details``
    mapping alongside the human-readable message, so callers can branch
    on the code without parsing the message text.
    """

    def __init__(self, message: str, error_code: str = "LLM_ERROR", details: dict = None):
        super().__init__(message)
        self.details = details or {}
        self.error_code = error_code
        self.message = message
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderError(LLMError):
    """Raised when a specific upstream LLM provider fails.

    Records which provider (``provider``) produced the failure in
    addition to the base message/code/details.
    """

    def __init__(self, message: str, provider: str, error_code: str = "PROVIDER_ERROR", details: dict = None):
        self.provider = provider
        super().__init__(message, error_code, details)
|
||||||
|
|
||||||
|
|
||||||
|
class SecurityError(LLMError):
    """Raised when a request or response fails a security check.

    ``risk_score`` holds the score that triggered the rejection
    (0.0 when no score was computed).
    """

    def __init__(self, message: str, risk_score: float = 0.0, error_code: str = "SECURITY_ERROR", details: dict = None):
        self.risk_score = risk_score
        super().__init__(message, error_code, details)
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationError(LLMError):
    """Raised for invalid or inconsistent LLM service configuration."""

    def __init__(self, message: str, error_code: str = "CONFIG_ERROR", details: dict = None):
        super().__init__(message, error_code=error_code, details=details)
|
||||||
|
|
||||||
|
|
||||||
|
class RateLimitError(LLMError):
    """Raised when a provider or the service rejects a request for rate limiting.

    ``retry_after`` is the suggested wait in seconds, when known.
    """

    def __init__(self, message: str, retry_after: int = None, error_code: str = "RATE_LIMIT_ERROR", details: dict = None):
        self.retry_after = retry_after
        super().__init__(message, error_code, details)
|
||||||
|
|
||||||
|
|
||||||
|
class TimeoutError(LLMError):
    """Raised when an LLM request exceeds its time budget.

    ``timeout_duration`` is the elapsed/budgeted time, when known.
    NOTE: this name shadows the builtin ``TimeoutError`` within this
    module — callers must import it explicitly to catch this type.
    """

    def __init__(self, message: str, timeout_duration: float = None, error_code: str = "TIMEOUT_ERROR", details: dict = None):
        self.timeout_duration = timeout_duration
        super().__init__(message, error_code, details)
|
||||||
|
|
||||||
|
|
||||||
|
class ValidationError(LLMError):
    """Raised when an incoming request fails validation.

    ``field`` names the offending request field, when identifiable.
    """

    def __init__(self, message: str, field: str = None, error_code: str = "VALIDATION_ERROR", details: dict = None):
        self.field = field
        super().__init__(message, error_code, details)
|
||||||
319
backend/app/services/llm/metrics.py
Normal file
319
backend/app/services/llm/metrics.py
Normal file
@@ -0,0 +1,319 @@
|
|||||||
|
"""
|
||||||
|
LLM Service Metrics Collection
|
||||||
|
|
||||||
|
Collects and manages metrics for LLM operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
from typing import Dict, Any, Optional, List
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from collections import defaultdict, deque
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from .models import LLMMetrics
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class RequestMetric:
    """Individual request metric.

    One record per LLM call, created by MetricsCollector.record_request
    and aggregated by its reporting methods.
    """
    # When the metric was recorded (naive UTC, from datetime.utcnow()).
    timestamp: datetime
    # Provider that served the request (e.g. "privatemode").
    provider: str
    # Model identifier used for the request.
    model: str
    request_type: str  # chat, embedding, etc.
    # Whether the request completed successfully.
    success: bool
    # End-to-end latency in milliseconds.
    latency_ms: float
    # Token counts (prompt_tokens / completion_tokens / total_tokens), if reported.
    token_usage: Optional[Dict[str, int]] = None
    # Security risk score assigned to the request (0.0 = none computed).
    security_risk_score: float = 0.0
    # Error code for failed requests, when available.
    error_code: Optional[str] = None
    # Identifier of the requesting user, when known.
    user_id: Optional[str] = None
    # Identifier of the API key used, when known.
    api_key_id: Optional[int] = None
|
||||||
|
|
||||||
|
|
||||||
|
class MetricsCollector:
    """Collects and aggregates LLM service metrics.

    Keeps a bounded in-memory history of RequestMetric records (global
    plus a smaller per-provider window) guarded by an RLock, and serves
    aggregate views with a short-lived cache. All timestamps use naive
    UTC (datetime.utcnow()).
    """

    def __init__(self, max_history_size: int = 10000):
        """
        Initialize metrics collector

        Args:
            max_history_size: Maximum number of metrics to keep in memory
        """
        self.max_history_size = max_history_size
        # Global bounded history; oldest entries are evicted automatically.
        self._metrics: deque = deque(maxlen=max_history_size)
        # Per-provider windows are capped at 1000 entries each, independent
        # of max_history_size.
        self._provider_metrics: Dict[str, deque] = defaultdict(lambda: deque(maxlen=1000))
        # RLock: get_metrics() -> _calculate_metrics() both run under it.
        self._lock = threading.RLock()

        # Aggregated metrics cache
        self._cache_timestamp: Optional[datetime] = None
        self._cached_metrics: Optional[LLMMetrics] = None
        self._cache_ttl_seconds = 60  # Cache for 1 minute

        logger.info(f"Metrics collector initialized with max history: {max_history_size}")

    def record_request(
        self,
        provider: str,
        model: str,
        request_type: str,
        success: bool,
        latency_ms: float,
        token_usage: Optional[Dict[str, int]] = None,
        security_risk_score: float = 0.0,
        error_code: Optional[str] = None,
        user_id: Optional[str] = None,
        api_key_id: Optional[int] = None
    ):
        """Record a request metric and invalidate the aggregate cache."""
        metric = RequestMetric(
            timestamp=datetime.utcnow(),
            provider=provider,
            model=model,
            request_type=request_type,
            success=success,
            latency_ms=latency_ms,
            token_usage=token_usage,
            security_risk_score=security_risk_score,
            error_code=error_code,
            user_id=user_id,
            api_key_id=api_key_id
        )

        with self._lock:
            self._metrics.append(metric)
            self._provider_metrics[provider].append(metric)

            # Invalidate cache
            self._cached_metrics = None
            self._cache_timestamp = None

        # Log significant events
        if not success:
            logger.warning(f"Request failed: {provider}/{model} - {error_code or 'Unknown error'}")
        elif security_risk_score > 0.6:
            logger.info(f"High risk request: {provider}/{model} - risk score: {security_risk_score:.3f}")

    def get_metrics(self, force_refresh: bool = False) -> LLMMetrics:
        """Return aggregated metrics, served from a 60s cache unless forced."""
        with self._lock:
            # Check cache validity
            if (not force_refresh and
                self._cached_metrics and
                self._cache_timestamp and
                (datetime.utcnow() - self._cache_timestamp).total_seconds() < self._cache_ttl_seconds):
                return self._cached_metrics

            # Calculate fresh metrics
            metrics = self._calculate_metrics()

            # Cache results
            self._cached_metrics = metrics
            self._cache_timestamp = datetime.utcnow()

            return metrics

    def _calculate_metrics(self) -> LLMMetrics:
        """Aggregate the recorded history into an LLMMetrics snapshot.

        Must be called with self._lock held (callers use the RLock).
        """
        if not self._metrics:
            return LLMMetrics()

        total_requests = len(self._metrics)
        successful_requests = sum(1 for m in self._metrics if m.success)
        failed_requests = total_requests - successful_requests
        # "Security blocked" = failed AND above the blocking risk threshold.
        security_blocked = sum(1 for m in self._metrics if not m.success and m.security_risk_score > 0.8)

        # Calculate averages
        # Zero/negative latencies are excluded from the latency average.
        latencies = [m.latency_ms for m in self._metrics if m.latency_ms > 0]
        risk_scores = [m.security_risk_score for m in self._metrics]

        avg_latency = sum(latencies) / len(latencies) if latencies else 0.0
        avg_risk_score = sum(risk_scores) / len(risk_scores) if risk_scores else 0.0

        # Provider-specific metrics
        provider_metrics = {}
        for provider, provider_data in self._provider_metrics.items():
            if provider_data:
                provider_metrics[provider] = self._calculate_provider_metrics(provider_data)

        return LLMMetrics(
            total_requests=total_requests,
            successful_requests=successful_requests,
            failed_requests=failed_requests,
            security_blocked_requests=security_blocked,
            average_latency_ms=avg_latency,
            average_risk_score=avg_risk_score,
            provider_metrics=provider_metrics,
            last_updated=datetime.utcnow()
        )

    def _calculate_provider_metrics(self, provider_data: deque) -> Dict[str, Any]:
        """Aggregate one provider's window into a plain dict of stats."""
        if not provider_data:
            return {}

        total = len(provider_data)
        successful = sum(1 for m in provider_data if m.success)
        failed = total - successful

        latencies = [m.latency_ms for m in provider_data if m.latency_ms > 0]
        avg_latency = sum(latencies) / len(latencies) if latencies else 0.0

        # Token usage aggregation
        total_prompt_tokens = 0
        total_completion_tokens = 0
        total_tokens = 0

        for metric in provider_data:
            if metric.token_usage:
                total_prompt_tokens += metric.token_usage.get("prompt_tokens", 0)
                total_completion_tokens += metric.token_usage.get("completion_tokens", 0)
                total_tokens += metric.token_usage.get("total_tokens", 0)

        # Model distribution
        model_counts = defaultdict(int)
        for metric in provider_data:
            model_counts[metric.model] += 1

        # Request type distribution
        request_type_counts = defaultdict(int)
        for metric in provider_data:
            request_type_counts[metric.request_type] += 1

        # Error analysis
        error_counts = defaultdict(int)
        for metric in provider_data:
            if not metric.success and metric.error_code:
                error_counts[metric.error_code] += 1

        return {
            "total_requests": total,
            "successful_requests": successful,
            "failed_requests": failed,
            "success_rate": successful / total if total > 0 else 0.0,
            "average_latency_ms": avg_latency,
            "token_usage": {
                "total_prompt_tokens": total_prompt_tokens,
                "total_completion_tokens": total_completion_tokens,
                "total_tokens": total_tokens,
                "avg_prompt_tokens": total_prompt_tokens / total if total > 0 else 0,
                # NOTE: averaged over successful requests only (prompt avg
                # uses all requests) — presumably intentional; confirm.
                "avg_completion_tokens": total_completion_tokens / successful if successful > 0 else 0
            },
            "model_distribution": dict(model_counts),
            "request_type_distribution": dict(request_type_counts),
            "error_distribution": dict(error_counts),
            "recent_requests": total
        }

    def get_provider_metrics(self, provider: str) -> Optional[Dict[str, Any]]:
        """Return aggregated stats for one provider, or None if never seen."""
        with self._lock:
            if provider not in self._provider_metrics:
                return None

            return self._calculate_provider_metrics(self._provider_metrics[provider])

    def get_recent_metrics(self, minutes: int = 5) -> List[RequestMetric]:
        """Return raw metrics recorded within the last *minutes* minutes."""
        cutoff_time = datetime.utcnow() - timedelta(minutes=minutes)

        with self._lock:
            return [m for m in self._metrics if m.timestamp >= cutoff_time]

    def get_error_metrics(self, hours: int = 1) -> Dict[str, int]:
        """Return error-code counts for failures within the last *hours* hours."""
        cutoff_time = datetime.utcnow() - timedelta(hours=hours)
        error_counts = defaultdict(int)

        with self._lock:
            for metric in self._metrics:
                if metric.timestamp >= cutoff_time and not metric.success and metric.error_code:
                    error_counts[metric.error_code] += 1

        return dict(error_counts)

    def get_performance_metrics(self, minutes: int = 15) -> Dict[str, Dict[str, float]]:
        """Return latency stats per provider over the last *minutes* minutes.

        Only successful requests contribute to the latency figures.
        """
        cutoff_time = datetime.utcnow() - timedelta(minutes=minutes)
        provider_perf = defaultdict(list)

        with self._lock:
            for metric in self._metrics:
                if metric.timestamp >= cutoff_time and metric.success:
                    provider_perf[metric.provider].append(metric.latency_ms)

        performance = {}
        for provider, latencies in provider_perf.items():
            if latencies:
                performance[provider] = {
                    "avg_latency_ms": sum(latencies) / len(latencies),
                    "min_latency_ms": min(latencies),
                    "max_latency_ms": max(latencies),
                    "p95_latency_ms": self._percentile(latencies, 95),
                    "p99_latency_ms": self._percentile(latencies, 99),
                    "request_count": len(latencies)
                }

        return performance

    def _percentile(self, data: List[float], percentile: int) -> float:
        """Return the *percentile*-th percentile of *data* via linear interpolation."""
        if not data:
            return 0.0

        sorted_data = sorted(data)
        # Fractional rank into the sorted list (0-based).
        index = (percentile / 100.0) * (len(sorted_data) - 1)

        if index.is_integer():
            return sorted_data[int(index)]
        else:
            # Interpolate between the two neighbouring samples.
            lower = sorted_data[int(index)]
            upper = sorted_data[int(index) + 1]
            return lower + (upper - lower) * (index - int(index))

    def clear_metrics(self):
        """Clear all metrics (use with caution)"""
        with self._lock:
            self._metrics.clear()
            self._provider_metrics.clear()
            self._cached_metrics = None
            self._cache_timestamp = None

        logger.info("All metrics cleared")

    def get_health_summary(self) -> Dict[str, Any]:
        """Return a compact health snapshot for monitoring dashboards.

        Health status is derived from the 5-minute success rate:
        >=95% healthy, >=80% degraded, otherwise unhealthy. With no
        recent traffic the success rate defaults to 1.0 (healthy).
        """
        metrics = self.get_metrics()
        recent_metrics = self.get_recent_metrics(minutes=5)
        error_metrics = self.get_error_metrics(hours=1)

        # Calculate health scores
        total_recent = len(recent_metrics)
        successful_recent = sum(1 for m in recent_metrics if m.success)
        success_rate = successful_recent / total_recent if total_recent > 0 else 1.0

        # Determine health status
        if success_rate >= 0.95:
            health_status = "healthy"
        elif success_rate >= 0.80:
            health_status = "degraded"
        else:
            health_status = "unhealthy"

        return {
            "health_status": health_status,
            "success_rate_5min": success_rate,
            "total_requests_5min": total_recent,
            "average_latency_ms": metrics.average_latency_ms,
            "error_count_1hour": sum(error_metrics.values()),
            "top_errors": dict(sorted(error_metrics.items(), key=lambda x: x[1], reverse=True)[:5]),
            "provider_count": len(metrics.provider_metrics),
            "last_updated": datetime.utcnow().isoformat()
        }
|
||||||
|
|
||||||
|
|
||||||
|
# Global metrics collector instance
# Module-level singleton shared by the LLM service; import this rather
# than constructing a second MetricsCollector.
metrics_collector = MetricsCollector()
|
||||||
173
backend/app/services/llm/models.py
Normal file
173
backend/app/services/llm/models.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
"""
|
||||||
|
LLM Service Data Models
|
||||||
|
|
||||||
|
Pydantic models for LLM requests and responses.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, List, Optional, Any, Union
|
||||||
|
from pydantic import BaseModel, Field, validator
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
|
class ChatMessage(BaseModel):
    """Individual chat message.

    One entry in a chat conversation; ``role`` is restricted to the
    OpenAI-style set (system/user/assistant/function).
    """
    role: str = Field(..., description="Message role (system, user, assistant)")
    content: str = Field(..., description="Message content")
    name: Optional[str] = Field(None, description="Optional message name")

    @validator('role')
    def validate_role(cls, v):
        # Reject roles outside the accepted OpenAI-style set.
        allowed_roles = {'system', 'user', 'assistant', 'function'}
        if v not in allowed_roles:
            raise ValueError(f"Role must be one of {allowed_roles}")
        return v
|
||||||
|
|
||||||
|
|
||||||
|
class ChatRequest(BaseModel):
    """Chat completion request.

    OpenAI-compatible sampling parameters plus the service's own
    attribution fields (``user_id``, ``api_key_id``), which are required
    on every request.
    """
    model: str = Field(..., description="Model identifier")
    messages: List[ChatMessage] = Field(..., description="Chat messages")
    # Sampling parameters — ranges mirror the OpenAI-style API limits.
    temperature: Optional[float] = Field(0.7, ge=0.0, le=2.0, description="Sampling temperature")
    max_tokens: Optional[int] = Field(None, ge=1, le=32000, description="Maximum tokens to generate")
    top_p: Optional[float] = Field(1.0, ge=0.0, le=1.0, description="Nucleus sampling parameter")
    top_k: Optional[int] = Field(None, ge=1, description="Top-k sampling parameter")
    frequency_penalty: Optional[float] = Field(0.0, ge=-2.0, le=2.0, description="Frequency penalty")
    presence_penalty: Optional[float] = Field(0.0, ge=-2.0, le=2.0, description="Presence penalty")
    stop: Optional[Union[str, List[str]]] = Field(None, description="Stop sequences")
    stream: Optional[bool] = Field(False, description="Stream response")
    # Attribution fields required by this service (not part of the OpenAI schema).
    user_id: str = Field(..., description="User identifier")
    api_key_id: int = Field(..., description="API key identifier")
    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata")

    @validator('messages')
    def validate_messages(cls, v):
        # A completion with no messages is meaningless — fail fast.
        if not v:
            raise ValueError("Messages cannot be empty")
        return v
|
||||||
|
|
||||||
|
|
||||||
|
class TokenUsage(BaseModel):
|
||||||
|
"""Token usage information"""
|
||||||
|
prompt_tokens: int = Field(..., description="Tokens in the prompt")
|
||||||
|
completion_tokens: int = Field(..., description="Tokens in the completion")
|
||||||
|
total_tokens: int = Field(..., description="Total tokens used")
|
||||||
|
|
||||||
|
|
||||||
|
class ChatChoice(BaseModel):
|
||||||
|
"""Chat completion choice"""
|
||||||
|
index: int = Field(..., description="Choice index")
|
||||||
|
message: ChatMessage = Field(..., description="Generated message")
|
||||||
|
finish_reason: Optional[str] = Field(None, description="Reason for completion finish")
|
||||||
|
|
||||||
|
|
||||||
|
class ChatResponse(BaseModel):
|
||||||
|
"""Chat completion response"""
|
||||||
|
id: str = Field(..., description="Response identifier")
|
||||||
|
object: str = Field("chat.completion", description="Object type")
|
||||||
|
created: int = Field(..., description="Creation timestamp")
|
||||||
|
model: str = Field(..., description="Model used")
|
||||||
|
provider: str = Field(..., description="Provider used")
|
||||||
|
choices: List[ChatChoice] = Field(..., description="Generated choices")
|
||||||
|
usage: Optional[TokenUsage] = Field(None, description="Token usage")
|
||||||
|
system_fingerprint: Optional[str] = Field(None, description="System fingerprint")
|
||||||
|
|
||||||
|
# Security and audit information
|
||||||
|
security_check: bool = Field(..., description="Whether security check passed")
|
||||||
|
risk_score: float = Field(..., description="Security risk score")
|
||||||
|
detected_patterns: List[str] = Field(default_factory=list, description="Detected security patterns")
|
||||||
|
|
||||||
|
# Performance metrics
|
||||||
|
latency_ms: Optional[float] = Field(None, description="Response latency in milliseconds")
|
||||||
|
provider_latency_ms: Optional[float] = Field(None, description="Provider-specific latency")
|
||||||
|
|
||||||
|
|
||||||
|
class EmbeddingRequest(BaseModel):
|
||||||
|
"""Embedding generation request"""
|
||||||
|
model: str = Field(..., description="Embedding model identifier")
|
||||||
|
input: Union[str, List[str]] = Field(..., description="Text to embed")
|
||||||
|
encoding_format: Optional[str] = Field("float", description="Encoding format")
|
||||||
|
dimensions: Optional[int] = Field(None, ge=1, description="Number of dimensions")
|
||||||
|
user_id: str = Field(..., description="User identifier")
|
||||||
|
api_key_id: int = Field(..., description="API key identifier")
|
||||||
|
metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata")
|
||||||
|
|
||||||
|
@validator('input')
|
||||||
|
def validate_input(cls, v):
|
||||||
|
if isinstance(v, str):
|
||||||
|
if not v.strip():
|
||||||
|
raise ValueError("Input text cannot be empty")
|
||||||
|
elif isinstance(v, list):
|
||||||
|
if not v or not all(isinstance(item, str) and item.strip() for item in v):
|
||||||
|
raise ValueError("Input list cannot be empty and must contain non-empty strings")
|
||||||
|
return v
|
||||||
|
|
||||||
|
|
||||||
|
class EmbeddingData(BaseModel):
|
||||||
|
"""Single embedding data"""
|
||||||
|
object: str = Field("embedding", description="Object type")
|
||||||
|
index: int = Field(..., description="Embedding index")
|
||||||
|
embedding: List[float] = Field(..., description="Embedding vector")
|
||||||
|
|
||||||
|
|
||||||
|
class EmbeddingResponse(BaseModel):
|
||||||
|
"""Embedding generation response"""
|
||||||
|
object: str = Field("list", description="Object type")
|
||||||
|
data: List[EmbeddingData] = Field(..., description="Embedding data")
|
||||||
|
model: str = Field(..., description="Model used")
|
||||||
|
provider: str = Field(..., description="Provider used")
|
||||||
|
usage: Optional[TokenUsage] = Field(None, description="Token usage")
|
||||||
|
|
||||||
|
# Security and audit information
|
||||||
|
security_check: bool = Field(..., description="Whether security check passed")
|
||||||
|
risk_score: float = Field(..., description="Security risk score")
|
||||||
|
|
||||||
|
# Performance metrics
|
||||||
|
latency_ms: Optional[float] = Field(None, description="Response latency in milliseconds")
|
||||||
|
provider_latency_ms: Optional[float] = Field(None, description="Provider-specific latency")
|
||||||
|
|
||||||
|
|
||||||
|
class ModelInfo(BaseModel):
|
||||||
|
"""Model information"""
|
||||||
|
id: str = Field(..., description="Model identifier")
|
||||||
|
object: str = Field("model", description="Object type")
|
||||||
|
created: Optional[int] = Field(None, description="Creation timestamp")
|
||||||
|
owned_by: str = Field(..., description="Model owner")
|
||||||
|
provider: str = Field(..., description="Provider name")
|
||||||
|
capabilities: List[str] = Field(default_factory=list, description="Model capabilities")
|
||||||
|
context_window: Optional[int] = Field(None, description="Context window size")
|
||||||
|
max_output_tokens: Optional[int] = Field(None, description="Maximum output tokens")
|
||||||
|
supports_streaming: bool = Field(False, description="Whether model supports streaming")
|
||||||
|
supports_function_calling: bool = Field(False, description="Whether model supports function calling")
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderStatus(BaseModel):
|
||||||
|
"""Provider health status"""
|
||||||
|
provider: str = Field(..., description="Provider name")
|
||||||
|
status: str = Field(..., description="Status (healthy, degraded, unavailable)")
|
||||||
|
latency_ms: Optional[float] = Field(None, description="Average latency")
|
||||||
|
success_rate: Optional[float] = Field(None, description="Success rate (0.0 to 1.0)")
|
||||||
|
last_check: datetime = Field(..., description="Last health check timestamp")
|
||||||
|
error_message: Optional[str] = Field(None, description="Error message if unhealthy")
|
||||||
|
models_available: List[str] = Field(default_factory=list, description="Available models")
|
||||||
|
|
||||||
|
|
||||||
|
class LLMMetrics(BaseModel):
|
||||||
|
"""LLM service metrics"""
|
||||||
|
total_requests: int = Field(0, description="Total requests processed")
|
||||||
|
successful_requests: int = Field(0, description="Successful requests")
|
||||||
|
failed_requests: int = Field(0, description="Failed requests")
|
||||||
|
security_blocked_requests: int = Field(0, description="Security blocked requests")
|
||||||
|
average_latency_ms: float = Field(0.0, description="Average response latency")
|
||||||
|
average_risk_score: float = Field(0.0, description="Average security risk score")
|
||||||
|
provider_metrics: Dict[str, Dict[str, Any]] = Field(default_factory=dict, description="Per-provider metrics")
|
||||||
|
last_updated: datetime = Field(default_factory=datetime.utcnow, description="Last metrics update")
|
||||||
|
|
||||||
|
|
||||||
|
class ResilienceConfig(BaseModel):
|
||||||
|
"""Configuration for resilience patterns"""
|
||||||
|
max_retries: int = Field(3, ge=0, le=10, description="Maximum retry attempts")
|
||||||
|
retry_delay_ms: int = Field(1000, ge=100, le=30000, description="Initial retry delay")
|
||||||
|
retry_exponential_base: float = Field(2.0, ge=1.1, le=5.0, description="Exponential backoff base")
|
||||||
|
timeout_ms: int = Field(30000, ge=1000, le=300000, description="Request timeout")
|
||||||
|
circuit_breaker_threshold: int = Field(5, ge=1, le=50, description="Circuit breaker failure threshold")
|
||||||
|
circuit_breaker_reset_timeout_ms: int = Field(60000, ge=10000, le=600000, description="Circuit breaker reset timeout")
|
||||||
10
backend/app/services/llm/providers/__init__.py
Normal file
10
backend/app/services/llm/providers/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
"""
|
||||||
|
LLM Providers Package
|
||||||
|
|
||||||
|
Base provider interface and provider implementations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .base import BaseLLMProvider
|
||||||
|
from .privatemode import PrivateModeProvider
|
||||||
|
|
||||||
|
__all__ = ["BaseLLMProvider", "PrivateModeProvider"]
|
||||||
226
backend/app/services/llm/providers/base.py
Normal file
226
backend/app/services/llm/providers/base.py
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
"""
|
||||||
|
Base LLM Provider Interface
|
||||||
|
|
||||||
|
Abstract base class for all LLM providers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import List, Dict, Any, Optional, AsyncGenerator
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from ..models import (
|
||||||
|
ChatRequest, ChatResponse, EmbeddingRequest, EmbeddingResponse,
|
||||||
|
ModelInfo, ProviderStatus
|
||||||
|
)
|
||||||
|
from ..config import ProviderConfig
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseLLMProvider(ABC):
|
||||||
|
"""Abstract base class for LLM providers"""
|
||||||
|
|
||||||
|
def __init__(self, config: ProviderConfig, api_key: str):
|
||||||
|
"""
|
||||||
|
Initialize provider
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config: Provider configuration
|
||||||
|
api_key: Decrypted API key for the provider
|
||||||
|
"""
|
||||||
|
self.config = config
|
||||||
|
self.api_key = api_key
|
||||||
|
self.name = config.name
|
||||||
|
self._session = None
|
||||||
|
|
||||||
|
logger.info(f"Initializing {self.name} provider")
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def provider_name(self) -> str:
|
||||||
|
"""Get provider name"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def health_check(self) -> ProviderStatus:
|
||||||
|
"""
|
||||||
|
Check provider health status
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
ProviderStatus with current health information
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def get_models(self) -> List[ModelInfo]:
|
||||||
|
"""
|
||||||
|
Get list of available models
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of available models with their capabilities
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def create_chat_completion(self, request: ChatRequest) -> ChatResponse:
|
||||||
|
"""
|
||||||
|
Create chat completion
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request: Chat completion request
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Chat completion response
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ProviderError: If provider-specific error occurs
|
||||||
|
SecurityError: If security validation fails
|
||||||
|
ValidationError: If request validation fails
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def create_chat_completion_stream(self, request: ChatRequest) -> AsyncGenerator[Dict[str, Any], None]:
|
||||||
|
"""
|
||||||
|
Create streaming chat completion
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request: Chat completion request with stream=True
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
Streaming response chunks
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ProviderError: If provider-specific error occurs
|
||||||
|
SecurityError: If security validation fails
|
||||||
|
ValidationError: If request validation fails
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def create_embedding(self, request: EmbeddingRequest) -> EmbeddingResponse:
|
||||||
|
"""
|
||||||
|
Create embeddings
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request: Embedding generation request
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Embedding response
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ProviderError: If provider-specific error occurs
|
||||||
|
SecurityError: If security validation fails
|
||||||
|
ValidationError: If request validation fails
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def initialize(self):
|
||||||
|
"""Initialize provider resources (override if needed)"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
async def cleanup(self):
|
||||||
|
"""Cleanup provider resources"""
|
||||||
|
if self._session and hasattr(self._session, 'close'):
|
||||||
|
await self._session.close()
|
||||||
|
logger.debug(f"Cleaned up session for {self.name} provider")
|
||||||
|
|
||||||
|
def supports_model(self, model_name: str) -> bool:
|
||||||
|
"""Check if provider supports a specific model"""
|
||||||
|
return model_name in self.config.supported_models
|
||||||
|
|
||||||
|
def supports_capability(self, capability: str) -> bool:
|
||||||
|
"""Check if provider supports a specific capability"""
|
||||||
|
return capability in self.config.capabilities
|
||||||
|
|
||||||
|
def get_model_info(self, model_name: str) -> Optional[ModelInfo]:
|
||||||
|
"""Get information about a specific model (override for provider-specific info)"""
|
||||||
|
if not self.supports_model(model_name):
|
||||||
|
return None
|
||||||
|
|
||||||
|
return ModelInfo(
|
||||||
|
id=model_name,
|
||||||
|
object="model",
|
||||||
|
owned_by=self.name,
|
||||||
|
provider=self.name,
|
||||||
|
capabilities=self.config.capabilities,
|
||||||
|
context_window=self.config.max_context_window,
|
||||||
|
max_output_tokens=self.config.max_output_tokens,
|
||||||
|
supports_streaming=self.config.supports_streaming,
|
||||||
|
supports_function_calling=self.config.supports_function_calling
|
||||||
|
)
|
||||||
|
|
||||||
|
def _validate_request(self, request: Any):
|
||||||
|
"""Base request validation (override for provider-specific validation)"""
|
||||||
|
if hasattr(request, 'model') and not self.supports_model(request.model):
|
||||||
|
from ..exceptions import ValidationError
|
||||||
|
raise ValidationError(
|
||||||
|
f"Model '{request.model}' not supported by provider '{self.name}'",
|
||||||
|
field="model"
|
||||||
|
)
|
||||||
|
|
||||||
|
def _create_headers(self, additional_headers: Optional[Dict[str, str]] = None) -> Dict[str, str]:
|
||||||
|
"""Create HTTP headers for requests"""
|
||||||
|
headers = {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"Authorization": f"Bearer {self.api_key}",
|
||||||
|
"User-Agent": f"Enclava-LLM-Service/{self.name}"
|
||||||
|
}
|
||||||
|
|
||||||
|
if additional_headers:
|
||||||
|
headers.update(additional_headers)
|
||||||
|
|
||||||
|
return headers
|
||||||
|
|
||||||
|
def _handle_http_error(self, status_code: int, response_text: str, provider_context: str = ""):
|
||||||
|
"""Handle HTTP errors consistently across providers"""
|
||||||
|
from ..exceptions import ProviderError, RateLimitError, ValidationError
|
||||||
|
|
||||||
|
context = f"{self.name} {provider_context}".strip()
|
||||||
|
|
||||||
|
if status_code == 401:
|
||||||
|
raise ProviderError(
|
||||||
|
f"Authentication failed for {context}",
|
||||||
|
provider=self.name,
|
||||||
|
error_code="AUTHENTICATION_ERROR",
|
||||||
|
details={"status_code": status_code, "response": response_text}
|
||||||
|
)
|
||||||
|
elif status_code == 403:
|
||||||
|
raise ProviderError(
|
||||||
|
f"Access forbidden for {context}",
|
||||||
|
provider=self.name,
|
||||||
|
error_code="AUTHORIZATION_ERROR",
|
||||||
|
details={"status_code": status_code, "response": response_text}
|
||||||
|
)
|
||||||
|
elif status_code == 429:
|
||||||
|
raise RateLimitError(
|
||||||
|
f"Rate limit exceeded for {context}",
|
||||||
|
error_code="RATE_LIMIT_ERROR",
|
||||||
|
details={"status_code": status_code, "response": response_text, "provider": self.name}
|
||||||
|
)
|
||||||
|
elif status_code == 400:
|
||||||
|
raise ValidationError(
|
||||||
|
f"Bad request for {context}: {response_text}",
|
||||||
|
error_code="BAD_REQUEST",
|
||||||
|
details={"status_code": status_code, "response": response_text}
|
||||||
|
)
|
||||||
|
elif 500 <= status_code < 600:
|
||||||
|
raise ProviderError(
|
||||||
|
f"Server error for {context}: {response_text}",
|
||||||
|
provider=self.name,
|
||||||
|
error_code="SERVER_ERROR",
|
||||||
|
details={"status_code": status_code, "response": response_text}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ProviderError(
|
||||||
|
f"HTTP error {status_code} for {context}: {response_text}",
|
||||||
|
provider=self.name,
|
||||||
|
error_code="HTTP_ERROR",
|
||||||
|
details={"status_code": status_code, "response": response_text}
|
||||||
|
)
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f"{self.__class__.__name__}(name={self.name})"
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"{self.__class__.__name__}(name={self.name}, enabled={self.config.enabled})"
|
||||||
521
backend/app/services/llm/providers/privatemode.py
Normal file
521
backend/app/services/llm/providers/privatemode.py
Normal file
@@ -0,0 +1,521 @@
|
|||||||
|
"""
|
||||||
|
PrivateMode.ai LLM Provider
|
||||||
|
|
||||||
|
Integration with PrivateMode.ai TEE-protected LLM service via proxy.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
from typing import List, Dict, Any, Optional, AsyncGenerator
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
from .base import BaseLLMProvider
|
||||||
|
from ..models import (
|
||||||
|
ChatRequest, ChatResponse, ChatMessage, ChatChoice, TokenUsage,
|
||||||
|
EmbeddingRequest, EmbeddingResponse, EmbeddingData,
|
||||||
|
ModelInfo, ProviderStatus
|
||||||
|
)
|
||||||
|
from ..config import ProviderConfig
|
||||||
|
from ..exceptions import ProviderError, ValidationError, TimeoutError
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateModeProvider(BaseLLMProvider):
|
||||||
|
"""PrivateMode.ai provider with TEE security"""
|
||||||
|
|
||||||
|
def __init__(self, config: ProviderConfig, api_key: str):
|
||||||
|
super().__init__(config, api_key)
|
||||||
|
self.base_url = config.base_url.rstrip('/')
|
||||||
|
self._session: Optional[aiohttp.ClientSession] = None
|
||||||
|
|
||||||
|
# TEE-specific settings
|
||||||
|
self.verify_ssl = True # Always verify SSL for security
|
||||||
|
self.trust_env = False # Don't trust environment proxy settings
|
||||||
|
|
||||||
|
logger.info(f"PrivateMode provider initialized with base URL: {self.base_url}")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def provider_name(self) -> str:
|
||||||
|
return "privatemode"
|
||||||
|
|
||||||
|
async def _get_session(self) -> aiohttp.ClientSession:
|
||||||
|
"""Get or create HTTP session with security settings"""
|
||||||
|
if self._session is None or self._session.closed:
|
||||||
|
# Create secure connector
|
||||||
|
connector = aiohttp.TCPConnector(
|
||||||
|
verify_ssl=self.verify_ssl,
|
||||||
|
limit=100, # Connection pool limit
|
||||||
|
limit_per_host=50,
|
||||||
|
ttl_dns_cache=300, # DNS cache TTL
|
||||||
|
use_dns_cache=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create session with security headers
|
||||||
|
timeout = aiohttp.ClientTimeout(total=self.config.resilience.timeout_ms / 1000.0)
|
||||||
|
|
||||||
|
self._session = aiohttp.ClientSession(
|
||||||
|
connector=connector,
|
||||||
|
timeout=timeout,
|
||||||
|
headers=self._create_headers(),
|
||||||
|
trust_env=False # Don't trust environment variables
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug("Created new secure HTTP session for PrivateMode")
|
||||||
|
|
||||||
|
return self._session
|
||||||
|
|
||||||
|
async def health_check(self) -> ProviderStatus:
|
||||||
|
"""Check PrivateMode.ai service health"""
|
||||||
|
start_time = time.time()
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = await self._get_session()
|
||||||
|
|
||||||
|
# Use a lightweight endpoint for health check
|
||||||
|
async with session.get(f"{self.base_url}/models") as response:
|
||||||
|
latency = (time.time() - start_time) * 1000
|
||||||
|
|
||||||
|
if response.status == 200:
|
||||||
|
models_data = await response.json()
|
||||||
|
models = [model.get("id", "") for model in models_data.get("data", [])]
|
||||||
|
|
||||||
|
return ProviderStatus(
|
||||||
|
provider=self.provider_name,
|
||||||
|
status="healthy",
|
||||||
|
latency_ms=latency,
|
||||||
|
success_rate=1.0,
|
||||||
|
last_check=datetime.utcnow(),
|
||||||
|
models_available=models
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
error_text = await response.text()
|
||||||
|
return ProviderStatus(
|
||||||
|
provider=self.provider_name,
|
||||||
|
status="degraded",
|
||||||
|
latency_ms=latency,
|
||||||
|
success_rate=0.0,
|
||||||
|
last_check=datetime.utcnow(),
|
||||||
|
error_message=f"HTTP {response.status}: {error_text}",
|
||||||
|
models_available=[]
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
latency = (time.time() - start_time) * 1000
|
||||||
|
logger.error(f"PrivateMode health check failed: {e}")
|
||||||
|
|
||||||
|
return ProviderStatus(
|
||||||
|
provider=self.provider_name,
|
||||||
|
status="unavailable",
|
||||||
|
latency_ms=latency,
|
||||||
|
success_rate=0.0,
|
||||||
|
last_check=datetime.utcnow(),
|
||||||
|
error_message=str(e),
|
||||||
|
models_available=[]
|
||||||
|
)
|
||||||
|
|
||||||
|
async def get_models(self) -> List[ModelInfo]:
|
||||||
|
"""Get available models from PrivateMode.ai"""
|
||||||
|
try:
|
||||||
|
session = await self._get_session()
|
||||||
|
|
||||||
|
async with session.get(f"{self.base_url}/models") as response:
|
||||||
|
if response.status == 200:
|
||||||
|
data = await response.json()
|
||||||
|
models_data = data.get("data", [])
|
||||||
|
|
||||||
|
models = []
|
||||||
|
for model_data in models_data:
|
||||||
|
model_id = model_data.get("id", "")
|
||||||
|
if not model_id:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Extract model capabilities from PrivateMode response
|
||||||
|
model_info = ModelInfo(
|
||||||
|
id=model_id,
|
||||||
|
object="model",
|
||||||
|
created=model_data.get("created", int(time.time())),
|
||||||
|
owned_by="privatemode",
|
||||||
|
provider=self.provider_name,
|
||||||
|
capabilities=self._get_model_capabilities(model_id),
|
||||||
|
context_window=self._get_model_context_window(model_id),
|
||||||
|
max_output_tokens=self._get_model_max_output(model_id),
|
||||||
|
supports_streaming=True, # PrivateMode supports streaming
|
||||||
|
supports_function_calling=self._supports_function_calling(model_id)
|
||||||
|
)
|
||||||
|
models.append(model_info)
|
||||||
|
|
||||||
|
logger.info(f"Retrieved {len(models)} models from PrivateMode")
|
||||||
|
return models
|
||||||
|
else:
|
||||||
|
error_text = await response.text()
|
||||||
|
self._handle_http_error(response.status, error_text, "models endpoint")
|
||||||
|
return [] # Never reached due to exception
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
if isinstance(e, ProviderError):
|
||||||
|
raise
|
||||||
|
|
||||||
|
logger.error(f"Failed to get models from PrivateMode: {e}")
|
||||||
|
raise ProviderError(
|
||||||
|
"Failed to retrieve models from PrivateMode",
|
||||||
|
provider=self.provider_name,
|
||||||
|
error_code="MODEL_RETRIEVAL_ERROR",
|
||||||
|
details={"error": str(e)}
|
||||||
|
)
|
||||||
|
|
||||||
|
async def create_chat_completion(self, request: ChatRequest) -> ChatResponse:
|
||||||
|
"""Create chat completion via PrivateMode.ai"""
|
||||||
|
self._validate_request(request)
|
||||||
|
|
||||||
|
start_time = time.time()
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = await self._get_session()
|
||||||
|
|
||||||
|
# Prepare request payload
|
||||||
|
payload = {
|
||||||
|
"model": request.model,
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"role": msg.role,
|
||||||
|
"content": msg.content,
|
||||||
|
**({"name": msg.name} if msg.name else {})
|
||||||
|
}
|
||||||
|
for msg in request.messages
|
||||||
|
],
|
||||||
|
"temperature": request.temperature,
|
||||||
|
"stream": False # Non-streaming version
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add optional parameters
|
||||||
|
if request.max_tokens is not None:
|
||||||
|
payload["max_tokens"] = request.max_tokens
|
||||||
|
if request.top_p is not None:
|
||||||
|
payload["top_p"] = request.top_p
|
||||||
|
if request.frequency_penalty is not None:
|
||||||
|
payload["frequency_penalty"] = request.frequency_penalty
|
||||||
|
if request.presence_penalty is not None:
|
||||||
|
payload["presence_penalty"] = request.presence_penalty
|
||||||
|
if request.stop is not None:
|
||||||
|
payload["stop"] = request.stop
|
||||||
|
|
||||||
|
# Add user tracking
|
||||||
|
payload["user"] = f"user_{request.user_id}"
|
||||||
|
|
||||||
|
# Add metadata for TEE audit trail
|
||||||
|
payload["metadata"] = {
|
||||||
|
"user_id": request.user_id,
|
||||||
|
"api_key_id": request.api_key_id,
|
||||||
|
"timestamp": datetime.utcnow().isoformat(),
|
||||||
|
"enclava_request_id": str(uuid.uuid4()),
|
||||||
|
**(request.metadata or {})
|
||||||
|
}
|
||||||
|
|
||||||
|
async with session.post(
|
||||||
|
f"{self.base_url}/chat/completions",
|
||||||
|
json=payload
|
||||||
|
) as response:
|
||||||
|
provider_latency = (time.time() - start_time) * 1000
|
||||||
|
|
||||||
|
if response.status == 200:
|
||||||
|
data = await response.json()
|
||||||
|
|
||||||
|
# Parse response
|
||||||
|
choices = []
|
||||||
|
for choice_data in data.get("choices", []):
|
||||||
|
message_data = choice_data.get("message", {})
|
||||||
|
choice = ChatChoice(
|
||||||
|
index=choice_data.get("index", 0),
|
||||||
|
message=ChatMessage(
|
||||||
|
role=message_data.get("role", "assistant"),
|
||||||
|
content=message_data.get("content", "")
|
||||||
|
),
|
||||||
|
finish_reason=choice_data.get("finish_reason")
|
||||||
|
)
|
||||||
|
choices.append(choice)
|
||||||
|
|
||||||
|
# Parse token usage
|
||||||
|
usage_data = data.get("usage", {})
|
||||||
|
usage = TokenUsage(
|
||||||
|
prompt_tokens=usage_data.get("prompt_tokens", 0),
|
||||||
|
completion_tokens=usage_data.get("completion_tokens", 0),
|
||||||
|
total_tokens=usage_data.get("total_tokens", 0)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create response
|
||||||
|
chat_response = ChatResponse(
|
||||||
|
id=data.get("id", str(uuid.uuid4())),
|
||||||
|
object=data.get("object", "chat.completion"),
|
||||||
|
created=data.get("created", int(time.time())),
|
||||||
|
model=data.get("model", request.model),
|
||||||
|
provider=self.provider_name,
|
||||||
|
choices=choices,
|
||||||
|
usage=usage,
|
||||||
|
system_fingerprint=data.get("system_fingerprint"),
|
||||||
|
security_check=True, # Will be set by security manager
|
||||||
|
risk_score=0.0, # Will be set by security manager
|
||||||
|
latency_ms=provider_latency,
|
||||||
|
provider_latency_ms=provider_latency
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug(f"PrivateMode chat completion successful in {provider_latency:.2f}ms")
|
||||||
|
return chat_response
|
||||||
|
|
||||||
|
else:
|
||||||
|
error_text = await response.text()
|
||||||
|
self._handle_http_error(response.status, error_text, "chat completion")
|
||||||
|
|
||||||
|
except aiohttp.ClientError as e:
|
||||||
|
logger.error(f"PrivateMode request error: {e}")
|
||||||
|
raise ProviderError(
|
||||||
|
"Network error communicating with PrivateMode",
|
||||||
|
provider=self.provider_name,
|
||||||
|
error_code="NETWORK_ERROR",
|
||||||
|
details={"error": str(e)}
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
if isinstance(e, (ProviderError, ValidationError)):
|
||||||
|
raise
|
||||||
|
|
||||||
|
logger.error(f"Unexpected error in PrivateMode chat completion: {e}")
|
||||||
|
raise ProviderError(
|
||||||
|
"Unexpected error during chat completion",
|
||||||
|
provider=self.provider_name,
|
||||||
|
error_code="UNEXPECTED_ERROR",
|
||||||
|
details={"error": str(e)}
|
||||||
|
)
|
||||||
|
|
||||||
|
async def create_chat_completion_stream(self, request: ChatRequest) -> AsyncGenerator[Dict[str, Any], None]:
|
||||||
|
"""Create streaming chat completion"""
|
||||||
|
self._validate_request(request)
|
||||||
|
|
||||||
|
try:
|
||||||
|
session = await self._get_session()
|
||||||
|
|
||||||
|
# Prepare streaming payload
|
||||||
|
payload = {
|
||||||
|
"model": request.model,
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"role": msg.role,
|
||||||
|
"content": msg.content,
|
||||||
|
**({"name": msg.name} if msg.name else {})
|
||||||
|
}
|
||||||
|
for msg in request.messages
|
||||||
|
],
|
||||||
|
"temperature": request.temperature,
|
||||||
|
"stream": True
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add optional parameters
|
||||||
|
if request.max_tokens is not None:
|
||||||
|
payload["max_tokens"] = request.max_tokens
|
||||||
|
if request.top_p is not None:
|
||||||
|
payload["top_p"] = request.top_p
|
||||||
|
if request.frequency_penalty is not None:
|
||||||
|
payload["frequency_penalty"] = request.frequency_penalty
|
||||||
|
if request.presence_penalty is not None:
|
||||||
|
payload["presence_penalty"] = request.presence_penalty
|
||||||
|
if request.stop is not None:
|
||||||
|
payload["stop"] = request.stop
|
||||||
|
|
||||||
|
# Add user tracking
|
||||||
|
payload["user"] = f"user_{request.user_id}"
|
||||||
|
|
||||||
|
async with session.post(
|
||||||
|
f"{self.base_url}/chat/completions",
|
||||||
|
json=payload
|
||||||
|
) as response:
|
||||||
|
if response.status == 200:
|
||||||
|
async for line in response.content:
|
||||||
|
line = line.decode('utf-8').strip()
|
||||||
|
|
||||||
|
if line.startswith("data: "):
|
||||||
|
data_str = line[6:] # Remove "data: " prefix
|
||||||
|
|
||||||
|
if data_str == "[DONE]":
|
||||||
|
break
|
||||||
|
|
||||||
|
try:
|
||||||
|
chunk_data = json.loads(data_str)
|
||||||
|
yield chunk_data
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
logger.warning(f"Failed to parse streaming chunk: {data_str}")
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
error_text = await response.text()
|
||||||
|
self._handle_http_error(response.status, error_text, "streaming chat completion")
|
||||||
|
|
||||||
|
except aiohttp.ClientError as e:
|
||||||
|
logger.error(f"PrivateMode streaming error: {e}")
|
||||||
|
raise ProviderError(
|
||||||
|
"Network error during streaming",
|
||||||
|
provider=self.provider_name,
|
||||||
|
error_code="STREAMING_ERROR",
|
||||||
|
details={"error": str(e)}
|
||||||
|
)
|
||||||
|
|
||||||
|
async def create_embedding(self, request: EmbeddingRequest) -> EmbeddingResponse:
    """Create embeddings via PrivateMode.ai.

    Validates the request, POSTs an OpenAI-style embeddings payload to
    ``{base_url}/embeddings``, and converts the JSON body into an
    EmbeddingResponse. Network failures are wrapped in ProviderError;
    non-200 statuses are delegated to ``_handle_http_error``.

    Raises:
        ProviderError: on network errors or unexpected failures.
        ValidationError: propagated from ``_validate_request``.
    """
    # Fail fast on malformed requests before opening any connection.
    self._validate_request(request)

    start_time = time.time()

    try:
        session = await self._get_session()

        # Prepare embedding payload (OpenAI-compatible field names).
        payload = {
            "model": request.model,
            "input": request.input,
            "user": f"user_{request.user_id}"
        }

        # Add optional parameters only when the caller supplied them,
        # so the upstream API's own defaults apply otherwise.
        if request.encoding_format:
            payload["encoding_format"] = request.encoding_format
        if request.dimensions:
            payload["dimensions"] = request.dimensions

        # Add metadata for upstream auditing; caller-supplied metadata
        # can override the defaults via the trailing ** expansion.
        payload["metadata"] = {
            "user_id": request.user_id,
            "api_key_id": request.api_key_id,
            "timestamp": datetime.utcnow().isoformat(),
            **(request.metadata or {})
        }

        async with session.post(
            f"{self.base_url}/embeddings",
            json=payload
        ) as response:
            # Latency measured from validation to response headers, in ms.
            provider_latency = (time.time() - start_time) * 1000

            if response.status == 200:
                data = await response.json()

                # Parse embedding data; missing fields default defensively.
                embeddings = []
                for emb_data in data.get("data", []):
                    embedding = EmbeddingData(
                        object="embedding",
                        index=emb_data.get("index", 0),
                        embedding=emb_data.get("embedding", [])
                    )
                    embeddings.append(embedding)

                # Parse usage; embeddings consume prompt tokens only.
                usage_data = data.get("usage", {})
                usage = TokenUsage(
                    prompt_tokens=usage_data.get("prompt_tokens", 0),
                    completion_tokens=0,  # No completion tokens for embeddings
                    total_tokens=usage_data.get("total_tokens", usage_data.get("prompt_tokens", 0))
                )

                return EmbeddingResponse(
                    object="list",
                    data=embeddings,
                    model=data.get("model", request.model),
                    provider=self.provider_name,
                    usage=usage,
                    security_check=True,  # Will be set by security manager
                    risk_score=0.0,  # Will be set by security manager
                    latency_ms=provider_latency,
                    provider_latency_ms=provider_latency
                )

            else:
                # _handle_http_error is expected to raise; nothing is
                # returned from this branch.
                error_text = await response.text()
                self._handle_http_error(response.status, error_text, "embeddings")

    except aiohttp.ClientError as e:
        # Transport-level failure (DNS, connect, TLS, read) — wrap with a
        # provider-specific error code for the caller.
        logger.error(f"PrivateMode embedding error: {e}")
        raise ProviderError(
            "Network error during embedding generation",
            provider=self.provider_name,
            error_code="EMBEDDING_ERROR",
            details={"error": str(e)}
        )
    except Exception as e:
        # Let already-classified errors (including those raised by
        # _handle_http_error) pass through unchanged.
        if isinstance(e, (ProviderError, ValidationError)):
            raise

        logger.error(f"Unexpected error in PrivateMode embedding: {e}")
        raise ProviderError(
            "Unexpected error during embedding generation",
            provider=self.provider_name,
            error_code="UNEXPECTED_ERROR",
            details={"error": str(e)}
        )
|
||||||
|
|
||||||
|
def _get_model_capabilities(self, model_id: str) -> List[str]:
|
||||||
|
"""Get capabilities for a specific model"""
|
||||||
|
capabilities = ["chat"]
|
||||||
|
|
||||||
|
# PrivateMode supports embeddings for most models
|
||||||
|
if "embed" in model_id.lower() or model_id in [
|
||||||
|
"privatemode-llama-3.1-405b", "privatemode-llama-3.1-70b",
|
||||||
|
"privatemode-claude-3.5-sonnet", "privatemode-gpt-4o"
|
||||||
|
]:
|
||||||
|
capabilities.append("embeddings")
|
||||||
|
|
||||||
|
# TEE protection is available for all PrivateMode models
|
||||||
|
capabilities.append("tee")
|
||||||
|
|
||||||
|
return capabilities
|
||||||
|
|
||||||
|
def _get_model_context_window(self, model_id: str) -> Optional[int]:
|
||||||
|
"""Get context window size for a specific model"""
|
||||||
|
context_windows = {
|
||||||
|
"privatemode-llama-3.1-405b": 128000,
|
||||||
|
"privatemode-llama-3.1-70b": 128000,
|
||||||
|
"privatemode-llama-3.1-8b": 128000,
|
||||||
|
"privatemode-llama-3-70b": 8192,
|
||||||
|
"privatemode-llama-3-8b": 8192,
|
||||||
|
"privatemode-claude-3.5-sonnet": 200000,
|
||||||
|
"privatemode-claude-3-haiku": 200000,
|
||||||
|
"privatemode-gpt-4o": 128000,
|
||||||
|
"privatemode-gpt-4o-mini": 128000,
|
||||||
|
"privatemode-gemini-1.5-pro": 2000000,
|
||||||
|
"privatemode-gemini-1.5-flash": 1000000
|
||||||
|
}
|
||||||
|
|
||||||
|
return context_windows.get(model_id, 8192) # Default to 8K
|
||||||
|
|
||||||
|
def _get_model_max_output(self, model_id: str) -> Optional[int]:
|
||||||
|
"""Get max output tokens for a specific model"""
|
||||||
|
max_outputs = {
|
||||||
|
"privatemode-llama-3.1-405b": 8192,
|
||||||
|
"privatemode-llama-3.1-70b": 8192,
|
||||||
|
"privatemode-llama-3.1-8b": 8192,
|
||||||
|
"privatemode-llama-3-70b": 4096,
|
||||||
|
"privatemode-llama-3-8b": 4096,
|
||||||
|
"privatemode-claude-3.5-sonnet": 8192,
|
||||||
|
"privatemode-claude-3-haiku": 4096,
|
||||||
|
"privatemode-gpt-4o": 16384,
|
||||||
|
"privatemode-gpt-4o-mini": 16384,
|
||||||
|
"privatemode-gemini-1.5-pro": 8192,
|
||||||
|
"privatemode-gemini-1.5-flash": 8192
|
||||||
|
}
|
||||||
|
|
||||||
|
return max_outputs.get(model_id, 4096) # Default to 4K
|
||||||
|
|
||||||
|
def _supports_function_calling(self, model_id: str) -> bool:
|
||||||
|
"""Check if model supports function calling"""
|
||||||
|
function_calling_models = [
|
||||||
|
"privatemode-gpt-4o", "privatemode-gpt-4o-mini",
|
||||||
|
"privatemode-claude-3.5-sonnet", "privatemode-claude-3-haiku",
|
||||||
|
"privatemode-gemini-1.5-pro", "privatemode-gemini-1.5-flash"
|
||||||
|
]
|
||||||
|
|
||||||
|
return model_id in function_calling_models
|
||||||
|
|
||||||
|
async def cleanup(self):
    """Release provider resources on shutdown.

    All actual teardown (HTTP session/connection pool — owned by the
    base provider class) is delegated to ``super().cleanup()``; this
    override only adds a trace log entry.
    """
    await super().cleanup()
    logger.debug("PrivateMode provider cleanup completed")
|
||||||
332
backend/app/services/llm/resilience.py
Normal file
332
backend/app/services/llm/resilience.py
Normal file
@@ -0,0 +1,332 @@
|
|||||||
|
"""
|
||||||
|
Resilience Patterns for LLM Service
|
||||||
|
|
||||||
|
Implements retry logic, circuit breaker, and timeout management.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from typing import Callable, Any, Optional, Dict, Type
|
||||||
|
from enum import Enum
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
from .exceptions import LLMError, TimeoutError, RateLimitError
|
||||||
|
from .models import ResilienceConfig
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CircuitBreakerState(Enum):
    """Circuit breaker states.

    The breaker moves CLOSED -> OPEN after repeated failures,
    OPEN -> HALF_OPEN after a cooldown, and HALF_OPEN -> CLOSED on a
    successful probe (or back to OPEN on failure).
    """
    CLOSED = "closed"        # Normal operation
    OPEN = "open"            # Failing, blocking requests
    HALF_OPEN = "half_open"  # Testing if service recovered
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class CircuitBreakerStats:
    """Circuit breaker statistics (mutated in place by CircuitBreaker)."""
    failure_count: int = 0                          # consecutive failures in CLOSED; reset on success
    success_count: int = 0                          # lifetime successes
    last_failure_time: Optional[datetime] = None    # wall-clock time of most recent failure
    last_success_time: Optional[datetime] = None    # wall-clock time of most recent success
    state_change_time: datetime = field(default_factory=datetime.utcnow)  # when current state was entered
|
||||||
|
|
||||||
|
|
||||||
|
class CircuitBreaker:
    """Circuit breaker implementation for provider resilience.

    Tracks consecutive failures per provider and blocks requests while
    OPEN; after ``circuit_breaker_reset_timeout_ms`` it lets a single
    probe through (HALF_OPEN) to test recovery.

    NOTE(review): not thread-safe — state transitions are unguarded;
    presumably only used from a single asyncio event loop. Confirm.
    """

    def __init__(self, config: ResilienceConfig, provider_name: str):
        self.config = config
        self.provider_name = provider_name
        self.state = CircuitBreakerState.CLOSED
        self.stats = CircuitBreakerStats()

    def can_execute(self) -> bool:
        """Check if request can be executed.

        Side effect: when OPEN and the reset timeout has elapsed, this
        transitions to HALF_OPEN before returning True.
        """
        if self.state == CircuitBreakerState.CLOSED:
            return True

        if self.state == CircuitBreakerState.OPEN:
            # Check if reset timeout has passed (state age in ms).
            if (datetime.utcnow() - self.stats.state_change_time).total_seconds() * 1000 > self.config.circuit_breaker_reset_timeout_ms:
                self._transition_to_half_open()
                return True
            return False

        if self.state == CircuitBreakerState.HALF_OPEN:
            return True

        return False

    def record_success(self):
        """Record successful request and close the breaker if probing."""
        self.stats.success_count += 1
        self.stats.last_success_time = datetime.utcnow()

        if self.state == CircuitBreakerState.HALF_OPEN:
            # Probe succeeded — service has recovered.
            self._transition_to_closed()
        elif self.state == CircuitBreakerState.CLOSED:
            # Reset failure count on success (only consecutive failures count).
            self.stats.failure_count = 0

        logger.debug(f"Circuit breaker [{self.provider_name}]: Success recorded, state={self.state.value}")

    def record_failure(self):
        """Record failed request; may trip the breaker OPEN."""
        self.stats.failure_count += 1
        self.stats.last_failure_time = datetime.utcnow()

        if self.state == CircuitBreakerState.CLOSED:
            if self.stats.failure_count >= self.config.circuit_breaker_threshold:
                self._transition_to_open()
        elif self.state == CircuitBreakerState.HALF_OPEN:
            # A failed probe re-opens immediately, restarting the cooldown.
            self._transition_to_open()

        logger.warning(f"Circuit breaker [{self.provider_name}]: Failure recorded, "
                       f"count={self.stats.failure_count}, state={self.state.value}")

    def _transition_to_open(self):
        """Transition to OPEN state (block all requests)."""
        self.state = CircuitBreakerState.OPEN
        self.stats.state_change_time = datetime.utcnow()
        logger.error(f"Circuit breaker [{self.provider_name}]: OPENED after {self.stats.failure_count} failures")

    def _transition_to_half_open(self):
        """Transition to HALF_OPEN state (allow probe requests)."""
        self.state = CircuitBreakerState.HALF_OPEN
        self.stats.state_change_time = datetime.utcnow()
        logger.info(f"Circuit breaker [{self.provider_name}]: Transitioning to HALF_OPEN for testing")

    def _transition_to_closed(self):
        """Transition to CLOSED state (normal operation)."""
        self.state = CircuitBreakerState.CLOSED
        self.stats.state_change_time = datetime.utcnow()
        self.stats.failure_count = 0  # Reset failure count
        logger.info(f"Circuit breaker [{self.provider_name}]: CLOSED - service recovered")

    def get_stats(self) -> Dict[str, Any]:
        """Get circuit breaker statistics as a JSON-serializable dict."""
        return {
            "state": self.state.value,
            "failure_count": self.stats.failure_count,
            "success_count": self.stats.success_count,
            "last_failure_time": self.stats.last_failure_time.isoformat() if self.stats.last_failure_time else None,
            "last_success_time": self.stats.last_success_time.isoformat() if self.stats.last_success_time else None,
            "state_change_time": self.stats.state_change_time.isoformat(),
            "time_in_current_state_ms": (datetime.utcnow() - self.stats.state_change_time).total_seconds() * 1000
        }
|
||||||
|
|
||||||
|
|
||||||
|
class RetryManager:
    """Retries failed coroutine calls with exponential backoff and jitter.

    Exceptions listed as non-retryable propagate immediately; retryable
    ones trigger up to ``config.max_retries`` additional attempts.
    """

    def __init__(self, config: ResilienceConfig):
        self.config = config

    async def execute_with_retry(
        self,
        func: Callable,
        *args,
        retryable_exceptions: tuple = (Exception,),
        non_retryable_exceptions: tuple = (RateLimitError,),
        **kwargs
    ) -> Any:
        """Execute function with retry logic"""
        final_error = None
        attempts_allowed = self.config.max_retries + 1

        for attempt in range(attempts_allowed):
            try:
                return await func(*args, **kwargs)
            except non_retryable_exceptions as e:
                # Checked first: these (e.g. rate limits) must never be retried.
                logger.warning(f"Non-retryable exception on attempt {attempt + 1}: {e}")
                raise
            except retryable_exceptions as e:
                final_error = e
                if attempt == self.config.max_retries:
                    logger.error(f"All {self.config.max_retries + 1} attempts failed. Last error: {e}")
                    raise
                delay = self._calculate_delay(attempt)
                logger.warning(f"Attempt {attempt + 1} failed: {e}. Retrying in {delay}ms...")
                await asyncio.sleep(delay / 1000.0)

        # Defensive fallback: the loop always returns or raises above.
        if final_error:
            raise final_error
        raise LLMError("Unexpected error in retry logic")

    def _calculate_delay(self, attempt: int) -> int:
        """Return the backoff delay in ms for the given 0-based attempt."""
        import random

        backoff = self.config.retry_delay_ms * (self.config.retry_exponential_base ** attempt)
        # Randomize by +/-20% so many clients don't retry in lockstep.
        return int(backoff * random.uniform(0.8, 1.2))
|
||||||
|
|
||||||
|
|
||||||
|
class TimeoutManager:
    """Enforces a wall-clock deadline on awaited provider calls."""

    def __init__(self, config: ResilienceConfig):
        self.config = config

    async def execute_with_timeout(
        self,
        func: Callable,
        *args,
        timeout_override: Optional[int] = None,
        **kwargs
    ) -> Any:
        """Execute function with timeout"""
        # Per-call override wins; otherwise use the configured default (ms).
        effective_ms = timeout_override or self.config.timeout_ms
        deadline_s = effective_ms / 1000.0

        try:
            return await asyncio.wait_for(func(*args, **kwargs), timeout=deadline_s)
        except asyncio.TimeoutError:
            # Translate asyncio's timeout into the service's own TimeoutError.
            message = f"Request timed out after {effective_ms}ms"
            logger.error(message)
            raise TimeoutError(message, timeout_duration=deadline_s)
|
||||||
|
|
||||||
|
|
||||||
|
class ResilienceManager:
    """Comprehensive resilience manager combining all patterns.

    Composes a CircuitBreaker, RetryManager and TimeoutManager for one
    provider. The composition order in ``execute`` is significant:
    retry wraps timeout, so EACH attempt gets its own full timeout.
    """

    def __init__(self, config: ResilienceConfig, provider_name: str):
        self.config = config
        self.provider_name = provider_name
        self.circuit_breaker = CircuitBreaker(config, provider_name)
        self.retry_manager = RetryManager(config)
        self.timeout_manager = TimeoutManager(config)

    async def execute(
        self,
        func: Callable,
        *args,
        retryable_exceptions: tuple = (Exception,),
        non_retryable_exceptions: tuple = (RateLimitError,),
        timeout_override: Optional[int] = None,
        **kwargs
    ) -> Any:
        """Execute function with full resilience patterns.

        Raises:
            LLMError: with code CIRCUIT_BREAKER_OPEN when the breaker
                is blocking requests; otherwise re-raises whatever the
                wrapped call raised after retries are exhausted.
        """

        # Check circuit breaker before doing any work.
        if not self.circuit_breaker.can_execute():
            error_msg = f"Circuit breaker is OPEN for provider {self.provider_name}"
            logger.error(error_msg)
            raise LLMError(error_msg, error_code="CIRCUIT_BREAKER_OPEN")

        start_time = time.time()

        try:
            # Execute with timeout and retry: retry_manager calls
            # execute_with_timeout, which in turn calls func — so the
            # timeout applies per attempt, not to the whole retry loop.
            result = await self.retry_manager.execute_with_retry(
                self.timeout_manager.execute_with_timeout,
                func,
                *args,
                retryable_exceptions=retryable_exceptions,
                non_retryable_exceptions=non_retryable_exceptions,
                timeout_override=timeout_override,
                **kwargs
            )

            # Record success (may close a HALF_OPEN breaker).
            self.circuit_breaker.record_success()

            execution_time = (time.time() - start_time) * 1000
            logger.debug(f"Resilient execution succeeded for {self.provider_name} in {execution_time:.2f}ms")

            return result

        except Exception as e:
            # Record failure — any exception (including non-retryable ones)
            # counts against the breaker threshold.
            self.circuit_breaker.record_failure()

            execution_time = (time.time() - start_time) * 1000
            logger.error(f"Resilient execution failed for {self.provider_name} after {execution_time:.2f}ms: {e}")

            raise

    def get_health_status(self) -> Dict[str, Any]:
        """Get comprehensive health status.

        Health is derived from breaker state: OPEN → unhealthy,
        HALF_OPEN → degraded, CLOSED → healthy unless recent failures
        exceed half the trip threshold (then degraded).
        """
        cb_stats = self.circuit_breaker.get_stats()

        # Determine overall health
        if cb_stats["state"] == "open":
            health = "unhealthy"
        elif cb_stats["state"] == "half_open":
            health = "degraded"
        else:
            # Check recent failure rate against half the trip threshold.
            recent_failures = cb_stats["failure_count"]
            if recent_failures > self.config.circuit_breaker_threshold // 2:
                health = "degraded"
            else:
                health = "healthy"

        return {
            "provider": self.provider_name,
            "health": health,
            "circuit_breaker": cb_stats,
            "config": {
                "max_retries": self.config.max_retries,
                "timeout_ms": self.config.timeout_ms,
                "circuit_breaker_threshold": self.config.circuit_breaker_threshold
            }
        }
|
||||||
|
|
||||||
|
|
||||||
|
class ResilienceManagerFactory:
    """Process-wide registry of per-provider ResilienceManager singletons."""

    # One manager per provider name, created lazily on first request.
    _managers: Dict[str, ResilienceManager] = {}
    _default_config = ResilienceConfig()

    @classmethod
    def get_manager(cls, provider_name: str, config: Optional[ResilienceConfig] = None) -> ResilienceManager:
        """Get or create resilience manager for provider"""
        if provider_name not in cls._managers:
            cls._managers[provider_name] = ResilienceManager(
                config or cls._default_config, provider_name
            )
        return cls._managers[provider_name]

    @classmethod
    def get_all_health_status(cls) -> Dict[str, Dict[str, Any]]:
        """Get health status for all managed providers"""
        statuses = {}
        for name, manager in cls._managers.items():
            statuses[name] = manager.get_health_status()
        return statuses

    @classmethod
    def update_config(cls, provider_name: str, config: ResilienceConfig):
        """Update configuration for a specific provider"""
        manager = cls._managers.get(provider_name)
        if manager is None:
            # Unknown provider: silently ignore, matching lazy creation.
            return
        # Push the new config into the manager and each sub-component.
        manager.config = config
        manager.circuit_breaker.config = config
        manager.retry_manager.config = config
        manager.timeout_manager.config = config

    @classmethod
    def reset_circuit_breaker(cls, provider_name: str):
        """Manually reset circuit breaker for a provider"""
        if provider_name in cls._managers:
            manager = cls._managers[provider_name]
            manager.circuit_breaker._transition_to_closed()
            logger.info(f"Manually reset circuit breaker for {provider_name}")

    @classmethod
    def set_default_config(cls, config: ResilienceConfig):
        """Set default configuration for new managers"""
        cls._default_config = config
|
||||||
352
backend/app/services/llm/security.py
Normal file
352
backend/app/services/llm/security.py
Normal file
@@ -0,0 +1,352 @@
|
|||||||
|
"""
|
||||||
|
LLM Security Manager
|
||||||
|
|
||||||
|
Handles API key encryption, prompt injection detection, and audit logging.
|
||||||
|
Provides comprehensive security for LLM interactions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import hashlib
|
||||||
|
from typing import Dict, Any, List, Optional, Tuple
|
||||||
|
from datetime import datetime
|
||||||
|
from cryptography.fernet import Fernet
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||||
|
import base64
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class SecurityManager:
|
||||||
|
"""Manages security for LLM operations"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._fernet = None
|
||||||
|
self._setup_encryption()
|
||||||
|
self._setup_prompt_injection_patterns()
|
||||||
|
|
||||||
|
def _setup_encryption(self):
|
||||||
|
"""Setup Fernet encryption for API keys"""
|
||||||
|
try:
|
||||||
|
# Get encryption key from environment or generate one
|
||||||
|
encryption_key = os.getenv("LLM_ENCRYPTION_KEY")
|
||||||
|
|
||||||
|
if not encryption_key:
|
||||||
|
# Generate a key if none exists (for development)
|
||||||
|
# In production, this should be set as an environment variable
|
||||||
|
logger.warning("LLM_ENCRYPTION_KEY not set, generating temporary key")
|
||||||
|
key = Fernet.generate_key()
|
||||||
|
encryption_key = key.decode()
|
||||||
|
logger.info(f"Generated temporary encryption key: {encryption_key}")
|
||||||
|
else:
|
||||||
|
# Validate the key format
|
||||||
|
try:
|
||||||
|
key = encryption_key.encode()
|
||||||
|
Fernet(key) # Test if key is valid
|
||||||
|
except Exception:
|
||||||
|
# Key might be a password, derive Fernet key from it
|
||||||
|
key = self._derive_key_from_password(encryption_key)
|
||||||
|
|
||||||
|
self._fernet = Fernet(key if isinstance(key, bytes) else key.encode())
|
||||||
|
logger.info("Encryption system initialized successfully")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to setup encryption: {e}")
|
||||||
|
raise RuntimeError("Encryption setup failed")
|
||||||
|
|
||||||
|
def _derive_key_from_password(self, password: str) -> bytes:
|
||||||
|
"""Derive Fernet key from password using PBKDF2"""
|
||||||
|
# Use a fixed salt for consistency (in production, store this securely)
|
||||||
|
salt = b"enclava_llm_salt"
|
||||||
|
kdf = PBKDF2HMAC(
|
||||||
|
algorithm=hashes.SHA256(),
|
||||||
|
length=32,
|
||||||
|
salt=salt,
|
||||||
|
iterations=100000,
|
||||||
|
)
|
||||||
|
key = base64.urlsafe_b64encode(kdf.derive(password.encode()))
|
||||||
|
return key
|
||||||
|
|
||||||
|
def _setup_prompt_injection_patterns(self):
|
||||||
|
"""Setup patterns for prompt injection detection"""
|
||||||
|
self.injection_patterns = [
|
||||||
|
# Direct instruction injection
|
||||||
|
r"(?i)(ignore|forget|disregard|override)\s+(previous|all|above|prior)\s+(instructions|rules|prompts)",
|
||||||
|
r"(?i)(new|updated|different)\s+(instructions|rules|system)",
|
||||||
|
r"(?i)act\s+as\s+(if|though)\s+you\s+(are|were)",
|
||||||
|
r"(?i)pretend\s+(to\s+be|you\s+are)",
|
||||||
|
r"(?i)you\s+are\s+now\s+(a|an)\s+",
|
||||||
|
|
||||||
|
# System role manipulation
|
||||||
|
r"(?i)system\s*:\s*",
|
||||||
|
r"(?i)\[system\]",
|
||||||
|
r"(?i)<system>",
|
||||||
|
r"(?i)assistant\s*:\s*",
|
||||||
|
r"(?i)\[assistant\]",
|
||||||
|
|
||||||
|
# Escape attempts
|
||||||
|
r"(?i)\\n\\n#+",
|
||||||
|
r"(?i)```\s*(system|assistant|user)",
|
||||||
|
r"(?i)---\s*(new|system|override)",
|
||||||
|
|
||||||
|
# Role manipulation
|
||||||
|
r"(?i)(you|your)\s+(role|purpose|function)\s+(is|has\s+changed)",
|
||||||
|
r"(?i)switch\s+to\s+(admin|developer|debug)\s+mode",
|
||||||
|
r"(?i)(admin|root|sudo|developer)\s+(access|mode|privileges)",
|
||||||
|
|
||||||
|
# Information extraction attempts
|
||||||
|
r"(?i)(show|display|reveal|expose)\s+(your|the)\s+(prompt|instructions|system)",
|
||||||
|
r"(?i)what\s+(are|were)\s+your\s+(original|initial)\s+(instructions|prompts)",
|
||||||
|
r"(?i)(debug|verbose|diagnostic)\s+mode",
|
||||||
|
|
||||||
|
# Encoding/obfuscation attempts
|
||||||
|
r"(?i)base64\s*:",
|
||||||
|
r"(?i)hex\s*:",
|
||||||
|
r"(?i)unicode\s*:",
|
||||||
|
r"[A-Za-z0-9+/]{20,}={0,2}", # Potential base64
|
||||||
|
|
||||||
|
# SQL injection patterns (for system prompts)
|
||||||
|
r"(?i)(union|select|insert|update|delete|drop|create)\s+",
|
||||||
|
r"(?i)(or|and)\s+1\s*=\s*1",
|
||||||
|
r"(?i)';?\s*(drop|delete|insert)",
|
||||||
|
|
||||||
|
# Command injection patterns
|
||||||
|
r"(?i)(exec|eval|system|shell|cmd)\s*\(",
|
||||||
|
r"(?i)(\$\(|\`)[^)]+(\)|\`)",
|
||||||
|
r"(?i)&&\s*(rm|del|format)",
|
||||||
|
|
||||||
|
# Jailbreak attempts
|
||||||
|
r"(?i)jailbreak",
|
||||||
|
r"(?i)break\s+out\s+of",
|
||||||
|
r"(?i)escape\s+(the|your)\s+(rules|constraints)",
|
||||||
|
r"(?i)(DAN|Do\s+Anything\s+Now)",
|
||||||
|
r"(?i)unrestricted\s+mode",
|
||||||
|
]
|
||||||
|
|
||||||
|
self.compiled_patterns = [re.compile(pattern) for pattern in self.injection_patterns]
|
||||||
|
logger.info(f"Initialized {len(self.injection_patterns)} prompt injection patterns")
|
||||||
|
|
||||||
|
def encrypt_api_key(self, api_key: str) -> str:
|
||||||
|
"""Encrypt an API key for secure storage"""
|
||||||
|
try:
|
||||||
|
if not api_key:
|
||||||
|
raise ValueError("API key cannot be empty")
|
||||||
|
|
||||||
|
encrypted = self._fernet.encrypt(api_key.encode())
|
||||||
|
return base64.urlsafe_b64encode(encrypted).decode()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to encrypt API key: {e}")
|
||||||
|
raise SecurityError("API key encryption failed")
|
||||||
|
|
||||||
|
def decrypt_api_key(self, encrypted_key: str) -> str:
|
||||||
|
"""Decrypt an API key for use"""
|
||||||
|
try:
|
||||||
|
if not encrypted_key:
|
||||||
|
raise ValueError("Encrypted key cannot be empty")
|
||||||
|
|
||||||
|
decoded = base64.urlsafe_b64decode(encrypted_key.encode())
|
||||||
|
decrypted = self._fernet.decrypt(decoded)
|
||||||
|
return decrypted.decode()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to decrypt API key: {e}")
|
||||||
|
raise SecurityError("API key decryption failed")
|
||||||
|
|
||||||
|
def validate_prompt_security(self, messages: List[Dict[str, str]]) -> Tuple[bool, float, List[str]]:
|
||||||
|
"""
|
||||||
|
Validate messages for prompt injection attempts
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple[bool, float, List[str]]: (is_safe, risk_score, detected_patterns)
|
||||||
|
"""
|
||||||
|
detected_patterns = []
|
||||||
|
total_risk = 0.0
|
||||||
|
|
||||||
|
for message in messages:
|
||||||
|
content = message.get("content", "")
|
||||||
|
if not content:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Check against injection patterns
|
||||||
|
for i, pattern in enumerate(self.compiled_patterns):
|
||||||
|
matches = pattern.findall(content)
|
||||||
|
if matches:
|
||||||
|
pattern_risk = self._calculate_pattern_risk(i, matches)
|
||||||
|
total_risk += pattern_risk
|
||||||
|
detected_patterns.append({
|
||||||
|
"pattern_index": i,
|
||||||
|
"pattern": self.injection_patterns[i],
|
||||||
|
"matches": matches,
|
||||||
|
"risk": pattern_risk
|
||||||
|
})
|
||||||
|
|
||||||
|
# Additional security checks
|
||||||
|
total_risk += self._check_message_characteristics(content)
|
||||||
|
|
||||||
|
# Normalize risk score (0.0 to 1.0)
|
||||||
|
risk_score = min(total_risk / len(messages) if messages else 0.0, 1.0)
|
||||||
|
is_safe = risk_score < settings.API_SECURITY_RISK_THRESHOLD
|
||||||
|
|
||||||
|
if detected_patterns:
|
||||||
|
logger.warning(f"Detected {len(detected_patterns)} potential injection patterns, risk score: {risk_score}")
|
||||||
|
|
||||||
|
return is_safe, risk_score, detected_patterns
|
||||||
|
|
||||||
|
def _calculate_pattern_risk(self, pattern_index: int, matches: List) -> float:
|
||||||
|
"""Calculate risk score for a detected pattern"""
|
||||||
|
# Different patterns have different risk levels
|
||||||
|
high_risk_patterns = [0, 1, 2, 3, 4, 5, 6, 7, 14, 15, 16, 22, 23, 24] # System manipulation, jailbreak
|
||||||
|
medium_risk_patterns = [8, 9, 10, 11, 12, 13, 17, 18, 19, 20, 21] # Escape attempts, info extraction
|
||||||
|
|
||||||
|
base_risk = 0.8 if pattern_index in high_risk_patterns else 0.5 if pattern_index in medium_risk_patterns else 0.3
|
||||||
|
|
||||||
|
# Increase risk based on number of matches
|
||||||
|
match_multiplier = min(1.0 + (len(matches) - 1) * 0.2, 2.0)
|
||||||
|
|
||||||
|
return base_risk * match_multiplier
|
||||||
|
|
||||||
|
def _check_message_characteristics(self, content: str) -> float:
|
||||||
|
"""Check message characteristics for additional risk factors"""
|
||||||
|
risk = 0.0
|
||||||
|
|
||||||
|
# Excessive length (potential stuffing attack)
|
||||||
|
if len(content) > 10000:
|
||||||
|
risk += 0.3
|
||||||
|
|
||||||
|
# High ratio of special characters
|
||||||
|
special_chars = sum(1 for c in content if not c.isalnum() and not c.isspace())
|
||||||
|
if len(content) > 0 and special_chars / len(content) > 0.5:
|
||||||
|
risk += 0.4
|
||||||
|
|
||||||
|
# Multiple encoding indicators
|
||||||
|
encoding_indicators = ["base64", "hex", "unicode", "url", "ascii"]
|
||||||
|
found_encodings = sum(1 for indicator in encoding_indicators if indicator.lower() in content.lower())
|
||||||
|
if found_encodings > 1:
|
||||||
|
risk += 0.3
|
||||||
|
|
||||||
|
# Excessive newlines or formatting (potential formatting attacks)
|
||||||
|
if content.count('\n') > 50 or content.count('\\n') > 50:
|
||||||
|
risk += 0.2
|
||||||
|
|
||||||
|
return risk
|
||||||
|
|
||||||
|
def create_audit_log(
|
||||||
|
self,
|
||||||
|
user_id: str,
|
||||||
|
api_key_id: int,
|
||||||
|
provider: str,
|
||||||
|
model: str,
|
||||||
|
request_type: str,
|
||||||
|
risk_score: float,
|
||||||
|
detected_patterns: List[str],
|
||||||
|
metadata: Optional[Dict[str, Any]] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Create comprehensive audit log for LLM request"""
|
||||||
|
audit_entry = {
|
||||||
|
"timestamp": datetime.utcnow().isoformat(),
|
||||||
|
"user_id": user_id,
|
||||||
|
"api_key_id": api_key_id,
|
||||||
|
"provider": provider,
|
||||||
|
"model": model,
|
||||||
|
"request_type": request_type,
|
||||||
|
"security": {
|
||||||
|
"risk_score": risk_score,
|
||||||
|
"detected_patterns": detected_patterns,
|
||||||
|
"security_check_passed": risk_score < settings.API_SECURITY_RISK_THRESHOLD
|
||||||
|
},
|
||||||
|
"metadata": metadata or {},
|
||||||
|
"audit_hash": None # Will be set below
|
||||||
|
}
|
||||||
|
|
||||||
|
# Create hash for audit integrity
|
||||||
|
audit_hash = self._create_audit_hash(audit_entry)
|
||||||
|
audit_entry["audit_hash"] = audit_hash
|
||||||
|
|
||||||
|
# Log based on risk level
|
||||||
|
if risk_score >= settings.API_SECURITY_RISK_THRESHOLD:
|
||||||
|
logger.error(f"HIGH RISK LLM REQUEST BLOCKED: {json.dumps(audit_entry)}")
|
||||||
|
elif risk_score >= settings.API_SECURITY_WARNING_THRESHOLD:
|
||||||
|
logger.warning(f"MEDIUM RISK LLM REQUEST: {json.dumps(audit_entry)}")
|
||||||
|
else:
|
||||||
|
logger.info(f"LLM REQUEST AUDIT: user={user_id}, model={model}, risk={risk_score:.3f}")
|
||||||
|
|
||||||
|
return audit_entry
|
||||||
|
|
||||||
|
def _create_audit_hash(self, audit_entry: Dict[str, Any]) -> str:
|
||||||
|
"""Create hash for audit trail integrity"""
|
||||||
|
# Create hash from key fields (excluding the hash itself)
|
||||||
|
hash_data = {
|
||||||
|
"timestamp": audit_entry["timestamp"],
|
||||||
|
"user_id": audit_entry["user_id"],
|
||||||
|
"api_key_id": audit_entry["api_key_id"],
|
||||||
|
"provider": audit_entry["provider"],
|
||||||
|
"model": audit_entry["model"],
|
||||||
|
"request_type": audit_entry["request_type"],
|
||||||
|
"risk_score": audit_entry["security"]["risk_score"]
|
||||||
|
}
|
||||||
|
|
||||||
|
hash_string = json.dumps(hash_data, sort_keys=True)
|
||||||
|
return hashlib.sha256(hash_string.encode()).hexdigest()
|
||||||
|
|
||||||
|
def log_detailed_request(
|
||||||
|
self,
|
||||||
|
messages: List[Dict[str, str]],
|
||||||
|
model: str,
|
||||||
|
user_id: str,
|
||||||
|
provider: str,
|
||||||
|
context_info: Optional[Dict[str, Any]] = None
|
||||||
|
):
|
||||||
|
"""Log detailed LLM request if LOG_LLM_PROMPTS is enabled"""
|
||||||
|
if not settings.LOG_LLM_PROMPTS:
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("=== DETAILED LLM REQUEST ===")
|
||||||
|
logger.info(f"Model: {model}")
|
||||||
|
logger.info(f"Provider: {provider}")
|
||||||
|
logger.info(f"User ID: {user_id}")
|
||||||
|
|
||||||
|
if context_info:
|
||||||
|
for key, value in context_info.items():
|
||||||
|
logger.info(f"{key}: {value}")
|
||||||
|
|
||||||
|
logger.info("Messages to LLM:")
|
||||||
|
for i, message in enumerate(messages):
|
||||||
|
role = message.get("role", "unknown")
|
||||||
|
content = message.get("content", "")[:500] # Truncate for logging
|
||||||
|
logger.info(f" Message {i+1} [{role}]: {content}{'...' if len(message.get('content', '')) > 500 else ''}")
|
||||||
|
|
||||||
|
logger.info("=== END DETAILED LLM REQUEST ===")
|
||||||
|
|
||||||
|
def log_detailed_response(
    self,
    response_content: str,
    token_usage: Optional[Dict[str, int]] = None,
    provider: str = "unknown"
):
    """Emit a verbose log of an LLM response when LOG_LLM_PROMPTS is enabled.

    The response body is truncated to 500 characters; token usage is logged
    only when the caller supplies it.
    """
    if not settings.LOG_LLM_PROMPTS:
        return

    logger.info("=== DETAILED LLM RESPONSE ===")
    logger.info(f"Provider: {provider}")
    preview = response_content[:500]
    ellipsis = '...' if len(response_content) > 500 else ''
    logger.info(f"Response content: {preview}{ellipsis}")

    if token_usage:
        prompt = token_usage.get('prompt_tokens', 0)
        completion = token_usage.get('completion_tokens', 0)
        total = token_usage.get('total_tokens', 0)
        logger.info(f"Token usage - Prompt: {prompt}, Completion: {completion}, Total: {total}")

    logger.info("=== END DETAILED LLM RESPONSE ===")
|
||||||
|
|
||||||
|
|
||||||
|
class SecurityError(Exception):
    """Security-related errors in LLM operations.

    NOTE(review): service.py imports a SecurityError from .exceptions that is
    raised with risk_score= / details= keyword arguments; this plain Exception
    subclass accepts none.  Confirm this local definition does not shadow the
    richer one.
    """
    pass
|
||||||
|
|
||||||
|
|
||||||
|
# Global security manager instance — module-level singleton shared by the
# LLM service so every request goes through the same audit/validation state.
security_manager = SecurityManager()
|
||||||
537
backend/app/services/llm/service.py
Normal file
537
backend/app/services/llm/service.py
Normal file
@@ -0,0 +1,537 @@
|
|||||||
|
"""
|
||||||
|
LLM Service
|
||||||
|
|
||||||
|
Main service that coordinates providers, security, resilience, and metrics.
|
||||||
|
Replaces LiteLLM client functionality with direct provider integration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from typing import Dict, Any, Optional, List, AsyncGenerator
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from .models import (
|
||||||
|
ChatRequest, ChatResponse, EmbeddingRequest, EmbeddingResponse,
|
||||||
|
ModelInfo, ProviderStatus, LLMMetrics
|
||||||
|
)
|
||||||
|
from .config import config_manager, ProviderConfig
|
||||||
|
from .security import security_manager
|
||||||
|
from .resilience import ResilienceManagerFactory
|
||||||
|
from .metrics import metrics_collector
|
||||||
|
from .providers import BaseLLMProvider, PrivateModeProvider
|
||||||
|
from .exceptions import (
|
||||||
|
LLMError, ProviderError, SecurityError, ConfigurationError,
|
||||||
|
ValidationError, TimeoutError
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class LLMService:
|
||||||
|
"""Main LLM service coordinating all components"""
|
||||||
|
|
||||||
|
def __init__(self):
    """Construct an uninitialized service; providers attach in initialize()."""
    # Registry of healthy providers, keyed by provider name.
    self._providers: Dict[str, BaseLLMProvider] = {}
    # Flipped to True once initialize() completes successfully.
    self._initialized = False
    # Recorded at the start of initialize(); None until then.
    self._startup_time: Optional[datetime] = None

    logger.info("LLM Service initialized")
|
||||||
|
|
||||||
|
async def initialize(self):
    """Initialize the service and all enabled providers.

    Idempotent: a second call logs a warning and returns.  If the configured
    default provider failed to come up, the first working provider is promoted
    to default.

    Raises:
        ConfigurationError: when no provider is enabled, none initializes
            successfully, or any other startup failure occurs.
    """
    if self._initialized:
        logger.warning("LLM Service already initialized")
        return

    start_time = time.time()
    self._startup_time = datetime.utcnow()

    try:
        # Get configuration
        config = config_manager.get_config()
        logger.info(f"Initializing LLM service with {len(config.providers)} configured providers")

        # Initialize enabled providers
        enabled_providers = config_manager.get_enabled_providers()
        if not enabled_providers:
            raise ConfigurationError("No enabled providers found")

        for provider_name in enabled_providers:
            await self._initialize_provider(provider_name)

        # Verify we have at least one working provider
        if not self._providers:
            raise ConfigurationError("No providers successfully initialized")

        # Verify default provider is available; fall back to the first
        # working provider otherwise.
        default_provider = config.default_provider
        if default_provider not in self._providers:
            available_providers = list(self._providers.keys())
            logger.warning(f"Default provider '{default_provider}' not available, using '{available_providers[0]}'")
            config.default_provider = available_providers[0]

        self._initialized = True
        initialization_time = (time.time() - start_time) * 1000

        logger.info(f"LLM Service initialized successfully in {initialization_time:.2f}ms")
        logger.info(f"Available providers: {list(self._providers.keys())}")

    except ConfigurationError as e:
        # Already a well-formed configuration failure — don't double-wrap
        # it in another ConfigurationError (which would garble the message).
        logger.error(f"Failed to initialize LLM service: {e}")
        raise
    except Exception as e:
        logger.error(f"Failed to initialize LLM service: {e}")
        # Chain the original exception so the root cause survives.
        raise ConfigurationError(f"LLM service initialization failed: {e}") from e
|
||||||
|
|
||||||
|
async def _initialize_provider(self, provider_name: str):
    """Initialize one provider and register it in self._providers.

    Best-effort: every failure path logs and returns without raising, so a
    broken provider never aborts service startup.  A provider is registered
    only after it passes initialize() and a health check.
    """
    try:
        # Skip providers that are missing from config or explicitly disabled.
        provider_config = config_manager.get_provider_config(provider_name)
        if not provider_config or not provider_config.enabled:
            logger.warning(f"Provider '{provider_name}' not enabled, skipping")
            return

        # Get API key — a provider without credentials is silently skipped
        # (logged as error) rather than failing startup.
        api_key = config_manager.get_api_key(provider_name)
        if not api_key:
            logger.error(f"No API key found for provider '{provider_name}'")
            return

        # Create provider instance
        provider = self._create_provider(provider_config, api_key)

        # Initialize provider
        await provider.initialize()

        # Test provider health; "unavailable" keeps it out of the registry.
        health_status = await provider.health_check()
        if health_status.status == "unavailable":
            logger.error(f"Provider '{provider_name}' failed health check: {health_status.error_message}")
            return

        # Register provider
        self._providers[provider_name] = provider
        logger.info(f"Provider '{provider_name}' initialized successfully (status: {health_status.status})")

        # Fetch and update models dynamically so routing reflects what the
        # provider actually serves right now.
        await self._refresh_provider_models(provider_name, provider)

    except Exception as e:
        logger.error(f"Failed to initialize provider '{provider_name}': {e}")
|
||||||
|
|
||||||
|
def _create_provider(self, config: ProviderConfig, api_key: str) -> BaseLLMProvider:
    """Instantiate the provider implementation named in *config*.

    Raises ConfigurationError for any provider name other than "privatemode",
    the only implementation currently wired up.
    """
    if config.name != "privatemode":
        raise ConfigurationError(f"Unknown provider type: {config.name}")
    return PrivateModeProvider(config, api_key)
|
||||||
|
|
||||||
|
async def _refresh_provider_models(self, provider_name: str, provider: BaseLLMProvider):
    """Pull the provider's current model list and push it into configuration.

    Failures are logged and swallowed — stale routing data is preferable to
    aborting provider startup.
    """
    try:
        fetched = await provider.get_models()
        model_ids = [entry.id for entry in fetched]
        await config_manager.refresh_provider_models(provider_name, model_ids)
        logger.info(f"Refreshed {len(model_ids)} models for provider '{provider_name}': {model_ids}")
    except Exception as e:
        logger.error(f"Failed to refresh models for provider '{provider_name}': {e}")
|
||||||
|
|
||||||
|
async def create_chat_completion(self, request: ChatRequest) -> ChatResponse:
    """Create a chat completion with security screening, resilience, metrics
    and audit logging.

    Flow: validate -> security screen -> pick provider -> execute through the
    resilience manager -> annotate response -> record metrics + audit log.
    Both success and failure paths record metrics and an audit entry.

    Raises:
        ValidationError: when the request carries no messages.
        SecurityError: when the prompt fails the security screen.
        ProviderError: when no provider can serve the requested model.
        Exception: provider-side failures are recorded, then re-raised as-is.
    """
    if not self._initialized:
        await self.initialize()

    # Validate request
    if not request.messages:
        raise ValidationError("Messages cannot be empty", field="messages")

    # Security validation over the plain-dict form of the messages.
    messages_dict = [{"role": msg.role, "content": msg.content} for msg in request.messages]
    is_safe, risk_score, detected_patterns = security_manager.validate_prompt_security(messages_dict)

    if not is_safe:
        # Log security violation; provider is recorded as "blocked" since no
        # real provider ever saw the request.
        security_manager.create_audit_log(
            user_id=request.user_id,
            api_key_id=request.api_key_id,
            provider="blocked",
            model=request.model,
            request_type="chat_completion",
            risk_score=risk_score,
            detected_patterns=[p.get("pattern", "") for p in detected_patterns]
        )

        # Record blocked request under the synthetic "security" provider.
        metrics_collector.record_request(
            provider="security",
            model=request.model,
            request_type="chat_completion",
            success=False,
            latency_ms=0,
            security_risk_score=risk_score,
            error_code="SECURITY_BLOCKED",
            user_id=request.user_id,
            api_key_id=request.api_key_id
        )

        raise SecurityError(
            "Request blocked due to security concerns",
            risk_score=risk_score,
            details={"detected_patterns": detected_patterns}
        )

    # Get provider for model
    provider_name = self._get_provider_for_model(request.model)
    provider = self._providers.get(provider_name)

    if not provider:
        raise ProviderError(f"No available provider for model '{request.model}'", provider=provider_name)

    # Log detailed request if enabled (no-op unless LOG_LLM_PROMPTS is set).
    security_manager.log_detailed_request(
        messages=messages_dict,
        model=request.model,
        user_id=request.user_id,
        provider=provider_name,
        context_info={
            "temperature": request.temperature,
            "max_tokens": request.max_tokens,
            "risk_score": f"{risk_score:.3f}"
        }
    )

    # Execute with resilience (retry/circuit-breaker per provider).
    resilience_manager = ResilienceManagerFactory.get_manager(provider_name)
    start_time = time.time()

    try:
        response = await resilience_manager.execute(
            provider.create_chat_completion,
            request,
            retryable_exceptions=(ProviderError, TimeoutError),
            non_retryable_exceptions=(SecurityError, ValidationError)
        )

        # Annotate the response with the security screening outcome.
        response.security_check = is_safe
        response.risk_score = risk_score
        response.detected_patterns = [p.get("pattern", "") for p in detected_patterns]

        # Log detailed response if enabled (first choice only).
        if response.choices:
            content = response.choices[0].message.content
            security_manager.log_detailed_response(
                response_content=content,
                token_usage=response.usage.model_dump() if response.usage else None,
                provider=provider_name
            )

        # Record successful request (latency includes retries inside execute).
        total_latency = (time.time() - start_time) * 1000
        metrics_collector.record_request(
            provider=provider_name,
            model=request.model,
            request_type="chat_completion",
            success=True,
            latency_ms=total_latency,
            token_usage=response.usage.model_dump() if response.usage else None,
            security_risk_score=risk_score,
            user_id=request.user_id,
            api_key_id=request.api_key_id
        )

        # Create audit log
        security_manager.create_audit_log(
            user_id=request.user_id,
            api_key_id=request.api_key_id,
            provider=provider_name,
            model=request.model,
            request_type="chat_completion",
            risk_score=risk_score,
            detected_patterns=[p.get("pattern", "") for p in detected_patterns],
            metadata={
                "success": True,
                "latency_ms": total_latency,
                "token_usage": response.usage.model_dump() if response.usage else None
            }
        )

        return response

    except Exception as e:
        # Record failed request before re-raising the original exception.
        total_latency = (time.time() - start_time) * 1000
        error_code = getattr(e, 'error_code', e.__class__.__name__)

        metrics_collector.record_request(
            provider=provider_name,
            model=request.model,
            request_type="chat_completion",
            success=False,
            latency_ms=total_latency,
            security_risk_score=risk_score,
            error_code=error_code,
            user_id=request.user_id,
            api_key_id=request.api_key_id
        )

        # Create audit log for failure
        security_manager.create_audit_log(
            user_id=request.user_id,
            api_key_id=request.api_key_id,
            provider=provider_name,
            model=request.model,
            request_type="chat_completion",
            risk_score=risk_score,
            detected_patterns=[p.get("pattern", "") for p in detected_patterns],
            metadata={
                "success": False,
                "error": str(e),
                "error_code": error_code,
                "latency_ms": total_latency
            }
        )

        raise
|
||||||
|
|
||||||
|
async def create_chat_completion_stream(self, request: ChatRequest) -> AsyncGenerator[Dict[str, Any], None]:
    """Create a streaming chat completion, yielding raw chunk dicts.

    Applies the same security screening as the non-streaming path, but on
    failure records only metrics (no audit log, unlike create_chat_completion).

    Raises:
        SecurityError: when the prompt fails the security screen.
        ProviderError: when no provider can serve the requested model.
    """
    if not self._initialized:
        await self.initialize()

    # Security validation (same as non-streaming)
    messages_dict = [{"role": msg.role, "content": msg.content} for msg in request.messages]
    is_safe, risk_score, detected_patterns = security_manager.validate_prompt_security(messages_dict)

    if not is_safe:
        raise SecurityError(
            "Streaming request blocked due to security concerns",
            risk_score=risk_score,
            details={"detected_patterns": detected_patterns}
        )

    # Get provider
    provider_name = self._get_provider_for_model(request.model)
    provider = self._providers.get(provider_name)

    if not provider:
        raise ProviderError(f"No available provider for model '{request.model}'", provider=provider_name)

    # Execute streaming with resilience
    resilience_manager = ResilienceManagerFactory.get_manager(provider_name)

    try:
        # NOTE(review): this awaits execute(...) and then iterates its result,
        # so execute() must return the provider's async generator — retries
        # cover only stream *creation*, not mid-stream failures. Confirm
        # against the resilience manager's contract.
        async for chunk in await resilience_manager.execute(
            provider.create_chat_completion_stream,
            request,
            retryable_exceptions=(ProviderError, TimeoutError),
            non_retryable_exceptions=(SecurityError, ValidationError)
        ):
            yield chunk

    except Exception as e:
        # Record streaming failure; latency is not tracked for streams.
        error_code = getattr(e, 'error_code', e.__class__.__name__)
        metrics_collector.record_request(
            provider=provider_name,
            model=request.model,
            request_type="chat_completion_stream",
            success=False,
            latency_ms=0,
            security_risk_score=risk_score,
            error_code=error_code,
            user_id=request.user_id,
            api_key_id=request.api_key_id
        )
        raise
|
||||||
|
|
||||||
|
async def create_embedding(self, request: EmbeddingRequest) -> EmbeddingResponse:
    """Create embeddings with security screening, resilience and metrics.

    The embedding input (string or list of strings) is flattened to one text
    and screened as a single user message before being sent to the provider.

    Raises:
        SecurityError: when the input fails the security screen.
        ProviderError: when no provider can serve the requested model.
        Exception: provider-side failures are recorded, then re-raised as-is.
    """
    if not self._initialized:
        await self.initialize()

    # Security validation for embedding input; list inputs are joined with
    # spaces so the screen sees the full text once.
    input_text = request.input if isinstance(request.input, str) else " ".join(request.input)
    is_safe, risk_score, detected_patterns = security_manager.validate_prompt_security([
        {"role": "user", "content": input_text}
    ])

    if not is_safe:
        raise SecurityError(
            "Embedding request blocked due to security concerns",
            risk_score=risk_score,
            details={"detected_patterns": detected_patterns}
        )

    # Get provider
    provider_name = self._get_provider_for_model(request.model)
    provider = self._providers.get(provider_name)

    if not provider:
        raise ProviderError(f"No available provider for model '{request.model}'", provider=provider_name)

    # Execute with resilience
    resilience_manager = ResilienceManagerFactory.get_manager(provider_name)
    start_time = time.time()

    try:
        response = await resilience_manager.execute(
            provider.create_embedding,
            request,
            retryable_exceptions=(ProviderError, TimeoutError),
            non_retryable_exceptions=(SecurityError, ValidationError)
        )

        # Annotate the response with the security screening outcome.
        response.security_check = is_safe
        response.risk_score = risk_score

        # Record successful request
        total_latency = (time.time() - start_time) * 1000
        metrics_collector.record_request(
            provider=provider_name,
            model=request.model,
            request_type="embedding",
            success=True,
            latency_ms=total_latency,
            token_usage=response.usage.model_dump() if response.usage else None,
            security_risk_score=risk_score,
            user_id=request.user_id,
            api_key_id=request.api_key_id
        )

        return response

    except Exception as e:
        # Record failed request before re-raising the original exception.
        total_latency = (time.time() - start_time) * 1000
        error_code = getattr(e, 'error_code', e.__class__.__name__)

        metrics_collector.record_request(
            provider=provider_name,
            model=request.model,
            request_type="embedding",
            success=False,
            latency_ms=total_latency,
            security_risk_score=risk_score,
            error_code=error_code,
            user_id=request.user_id,
            api_key_id=request.api_key_id
        )

        raise
|
||||||
|
|
||||||
|
async def get_models(self, provider_name: Optional[str] = None) -> List[ModelInfo]:
    """Get available models from all providers, or one specific provider.

    Args:
        provider_name: restrict the query to this provider; an unknown name
            yields an empty list (matching the original behavior).

    Returns:
        Flat list of ModelInfo from every queried provider.  Per-provider
        failures are logged and skipped rather than raised.
    """
    if not self._initialized:
        await self.initialize()

    # Select the providers to query, then run them through one shared loop
    # instead of duplicating the try/except in two branches.
    if provider_name:
        provider = self._providers.get(provider_name)
        selected = {provider_name: provider} if provider else {}
    else:
        selected = dict(self._providers)

    models: List[ModelInfo] = []
    for name, provider in selected.items():
        try:
            provider_models = await provider.get_models()
            models.extend(provider_models)
        except Exception as e:
            logger.error(f"Failed to get models from {name}: {e}")

    return models
|
||||||
|
|
||||||
|
async def get_provider_status(self) -> Dict[str, ProviderStatus]:
    """Run a health check against every registered provider.

    A probe that raises is reported as an "unavailable" status entry instead
    of propagating the exception.
    """
    if not self._initialized:
        await self.initialize()

    results: Dict[str, ProviderStatus] = {}
    for name, provider in self._providers.items():
        try:
            results[name] = await provider.health_check()
        except Exception as e:
            logger.error(f"Health check failed for {name}: {e}")
            results[name] = ProviderStatus(
                provider=name,
                status="unavailable",
                last_check=datetime.utcnow(),
                error_message=str(e),
                models_available=[]
            )

    return results
|
||||||
|
|
||||||
|
def get_metrics(self) -> LLMMetrics:
    """Return the aggregate request metrics collected for this service."""
    return metrics_collector.get_metrics()
|
||||||
|
|
||||||
|
def get_health_summary(self) -> Dict[str, Any]:
    """Assemble a single health snapshot covering the service, its metrics
    collector, and the per-provider resilience managers."""
    return {
        "service_status": "healthy" if self._initialized else "initializing",
        "startup_time": self._startup_time.isoformat() if self._startup_time else None,
        "provider_count": len(self._providers),
        "active_providers": list(self._providers.keys()),
        "metrics": metrics_collector.get_health_summary(),
        "resilience": ResilienceManagerFactory.get_all_health_status(),
    }
|
||||||
|
|
||||||
|
def _get_provider_for_model(self, model: str) -> str:
    """Resolve which registered provider should serve *model*.

    Resolution order: explicit routing table, then capability probe of each
    provider, then the configured default, then any registered provider at
    all.  Raises ProviderError only when no provider is registered.
    """
    # 1. Explicit routing table.
    routed = config_manager.get_provider_for_model(model)
    if routed and routed in self._providers:
        return routed

    # 2. First provider claiming to support the model.
    for name, provider in self._providers.items():
        if provider.supports_model(model):
            return name

    # 3. Configured default, if it is actually registered.
    config = config_manager.get_config()
    if config.default_provider in self._providers:
        return config.default_provider

    # 4. Any provider at all, as a last resort.
    if self._providers:
        return next(iter(self._providers))

    raise ProviderError(f"No provider found for model '{model}'", provider="none")
|
||||||
|
|
||||||
|
async def cleanup(self):
    """Release every provider's resources and reset the service to cold state.

    One misbehaving provider never blocks cleanup of the rest; errors are
    logged and iteration continues.
    """
    logger.info("Cleaning up LLM service")

    for name, provider in self._providers.items():
        try:
            await provider.cleanup()
            logger.debug(f"Cleaned up provider: {name}")
        except Exception as e:
            logger.error(f"Error cleaning up provider {name}: {e}")

    self._providers.clear()
    self._initialized = False
    logger.info("LLM service cleanup completed")
|
||||||
|
|
||||||
|
|
||||||
|
# Global LLM service instance — module-level singleton; construction is cheap
# (no I/O), real work is deferred to llm_service.initialize().
llm_service = LLMService()
|
||||||
338
backend/app/services/plugin_autodiscovery.py
Normal file
338
backend/app/services/plugin_autodiscovery.py
Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
"""
|
||||||
|
Plugin Auto-Discovery Service
|
||||||
|
Automatically discovers and registers plugins from the /plugins directory on startup
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
import hashlib
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List, Optional, Any
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.models.plugin import Plugin
|
||||||
|
from app.db.database import SessionLocal
|
||||||
|
from app.schemas.plugin_manifest import validate_manifest_file
|
||||||
|
from app.services.plugin_database import plugin_db_manager
|
||||||
|
from app.services.plugin_sandbox import plugin_loader
|
||||||
|
from app.utils.exceptions import PluginError
|
||||||
|
|
||||||
|
logger = get_logger("plugin.autodiscovery")
|
||||||
|
|
||||||
|
|
||||||
|
class PluginAutoDiscovery:
|
||||||
|
"""Service for automatically discovering and registering plugins"""
|
||||||
|
|
||||||
|
def __init__(self, plugins_dir: str = None):
    """Create a discovery service rooted at *plugins_dir*.

    Falls back to settings.PLUGINS_DIR when no directory is given.
    """
    root = plugins_dir or settings.PLUGINS_DIR
    self.plugins_dir = Path(root)
    # slug -> plugin_info mapping, populated by scan_plugins_directory().
    self.discovered_plugins: Dict[str, Dict[str, Any]] = {}
|
||||||
|
|
||||||
|
async def scan_plugins_directory(self) -> List[Dict[str, Any]]:
    """Walk the plugins directory and collect every valid plugin's info dict.

    Returns an empty list when the directory is missing or scanning fails;
    successfully discovered plugins are also cached in self.discovered_plugins
    keyed by slug.
    """
    logger.info(f"Scanning plugins directory: {self.plugins_dir}")

    if not self.plugins_dir.exists():
        logger.warning(f"Plugins directory does not exist: {self.plugins_dir}")
        return []

    found: List[Dict[str, Any]] = []
    try:
        for entry in self.plugins_dir.iterdir():
            if not entry.is_dir():
                continue
            info = await self._discover_plugin(entry)
            if info:
                found.append(info)
                self.discovered_plugins[info['slug']] = info

        logger.info(f"Discovered {len(found)} plugins in directory")
        return found

    except Exception as e:
        logger.error(f"Error scanning plugins directory: {e}")
        return []
|
||||||
|
|
||||||
|
async def _discover_plugin(self, plugin_path: Path) -> Optional[Dict[str, Any]]:
    """Validate one plugin directory and build its discovery info dict.

    Requires both a valid manifest.yaml and a main.py; any missing piece or
    validation failure returns None (logged) rather than raising.
    """
    try:
        manifest_path = plugin_path / "manifest.yaml"

        if not manifest_path.exists():
            logger.debug(f"No manifest found in {plugin_path.name}")
            return None

        # Validate manifest
        validation_result = validate_manifest_file(manifest_path)

        if not validation_result["valid"]:
            logger.warning(f"Invalid manifest for plugin {plugin_path.name}: {validation_result['errors']}")
            return None

        manifest = validation_result["manifest"]

        # Check if main.py exists
        main_py_path = plugin_path / "main.py"
        if not main_py_path.exists():
            logger.warning(f"No main.py found for plugin {plugin_path.name}")
            return None

        # Generate hashes for plugin integrity.
        # NOTE(review): package_hash hashes the directory *path string*, not
        # the package contents — it cannot detect modified plugin files.
        # Confirm whether content hashing was intended.
        manifest_hash = hashlib.sha256(manifest_path.read_bytes()).hexdigest()
        package_hash = hashlib.sha256(str(plugin_path).encode()).hexdigest()

        # Convert manifest to JSON-serializable format.
        # NOTE(review): .json() is the pydantic v1 API; v2 renamed it to
        # model_dump_json() — confirm the pydantic version in use.
        import json
        manifest_dict = json.loads(manifest.json())

        plugin_info = {
            "slug": manifest.metadata.name,  # Use slug for string identifier
            "name": manifest.metadata.name,
            # NOTE(review): display_name is set to the *description*, not the
            # name — looks like a possible copy-paste; confirm intent.
            "display_name": manifest.metadata.description,
            "version": manifest.metadata.version,
            "description": manifest.metadata.description,
            "author": manifest.metadata.author,
            "manifest_data": manifest_dict,  # Use JSON-serialized version
            "plugin_path": str(plugin_path),
            "manifest_path": str(manifest_path),
            "main_py_path": str(main_py_path),
            "manifest_hash": manifest_hash,
            "package_hash": package_hash,
            "discovered_at": datetime.now(timezone.utc)
        }

        logger.info(f"Discovered plugin: {manifest.metadata.name} v{manifest.metadata.version}")
        return plugin_info

    except Exception as e:
        logger.error(f"Error discovering plugin at {plugin_path}: {e}")
        return None
|
||||||
|
|
||||||
|
async def register_discovered_plugins(self) -> Dict[str, bool]:
    """Persist every discovered plugin to the database.

    Returns a slug -> success mapping; one plugin's failure never stops the
    others.  The database session is always closed, even on error.
    """
    logger.info("Registering discovered plugins in database...")

    results: Dict[str, bool] = {}
    db = SessionLocal()

    try:
        for slug, info in self.discovered_plugins.items():
            try:
                ok = await self._register_single_plugin(db, info)
                results[slug] = ok
                if ok:
                    logger.info(f"Plugin {slug} registered successfully")
                else:
                    logger.warning(f"Failed to register plugin {slug}")
            except Exception as e:
                logger.error(f"Error registering plugin {slug}: {e}")
                results[slug] = False
    finally:
        db.close()

    succeeded = sum(1 for ok in results.values() if ok)
    logger.info(f"Plugin registration complete: {succeeded}/{len(results)} successful")

    return results
|
||||||
|
|
||||||
|
async def _register_single_plugin(self, db: Session, plugin_info: Dict[str, Any]) -> bool:
    """Insert or update one plugin row; returns True on success.

    Existing plugins are updated only when the version differs; new plugins
    are inserted as status "installed".  On any database error the session is
    rolled back and False is returned.
    """
    try:
        plugin_slug = plugin_info["slug"]

        # Check if plugin already exists by slug
        existing_plugin = db.query(Plugin).filter(Plugin.slug == plugin_slug).first()

        if existing_plugin:
            # Update existing plugin only if version is different.
            if existing_plugin.version != plugin_info["version"]:
                logger.info(f"Updating plugin {plugin_slug}: {existing_plugin.version} -> {plugin_info['version']}")

                existing_plugin.version = plugin_info["version"]
                existing_plugin.description = plugin_info["description"]
                existing_plugin.author = plugin_info["author"]
                existing_plugin.manifest_data = plugin_info["manifest_data"]
                existing_plugin.package_path = plugin_info["plugin_path"]
                existing_plugin.manifest_hash = plugin_info["manifest_hash"]
                existing_plugin.package_hash = plugin_info["package_hash"]
                existing_plugin.last_updated_at = datetime.now(timezone.utc)

                db.commit()

                # Update plugin schema after the row is committed.
                await self._setup_plugin_database(plugin_slug, plugin_info)
                return True
            else:
                logger.debug(f"Plugin {plugin_slug} already up to date")
                return True

        else:
            # Create new plugin record
            logger.info(f"Installing new plugin {plugin_slug}")

            plugin = Plugin(
                id=uuid.uuid4(),  # Generate UUID for primary key
                name=plugin_info["name"],
                slug=plugin_info["slug"],
                display_name=plugin_info["display_name"],
                version=plugin_info["version"],
                description=plugin_info["description"],
                author=plugin_info["author"],
                manifest_data=plugin_info["manifest_data"],
                package_path=plugin_info["plugin_path"],
                manifest_hash=plugin_info["manifest_hash"],
                package_hash=plugin_info["package_hash"],
                status="installed",
                # NOTE(review): assumes a system user with id 1 exists —
                # confirm against the users seed data.
                installed_by_user_id=1,  # System installation
                auto_enable=True  # Auto-enable discovered plugins
            )

            db.add(plugin)
            db.commit()

            # Setup plugin database schema
            await self._setup_plugin_database(plugin_slug, plugin_info)

            logger.info(f"Plugin {plugin_slug} installed successfully")
            return True

    except Exception as e:
        db.rollback()
        logger.error(f"Database error registering plugin {plugin_info['slug']}: {e}")
        return False
|
||||||
|
|
||||||
|
async def _setup_plugin_database(self, plugin_id: str, plugin_info: Dict[str, Any]) -> bool:
    """Set up the isolated database schema for a plugin, if it declares one.

    Args:
        plugin_id: Plugin slug/identifier used to derive the schema name.
        plugin_info: Discovery record; only its ``manifest_data`` is read here.

    Returns:
        True when setup succeeded or no schema was required; False on error.
    """
    try:
        manifest_data = plugin_info["manifest_data"]

        # Create plugin database schema if specified
        # Only plugins whose manifest declares a spec.database section get one.
        if "database" in manifest_data.get("spec", {}):
            logger.info(f"Creating database schema for plugin {plugin_id}")
            await plugin_db_manager.create_plugin_schema(plugin_id, manifest_data)
            logger.info(f"Database schema created for plugin {plugin_id}")

        return True

    except Exception as e:
        # Best-effort: report failure as False instead of raising, so a single
        # plugin's database problem cannot abort the whole discovery run.
        logger.error(f"Failed to setup database for plugin {plugin_id}: {e}")
        return False
|
||||||
|
|
||||||
|
async def load_discovered_plugins(self) -> Dict[str, bool]:
    """Load every previously discovered plugin into the plugin sandbox.

    Iterates ``self.discovered_plugins`` (filled by the scan step) and asks
    the plugin loader to load each one in its sandbox.

    Returns:
        Mapping of plugin slug -> True/False load success; a failure of one
        plugin does not stop the others.
    """
    logger.info("Loading discovered plugins into sandbox...")

    loading_results = {}

    for plugin_slug, plugin_info in self.discovered_plugins.items():
        try:
            # Load plugin into sandbox using the correct method
            plugin_dir = Path(plugin_info["plugin_path"])
            # NOTE(review): the token is a static, predictable placeholder
            # derived from the slug -- confirm whether a real credential is
            # expected by load_plugin_with_sandbox.
            plugin_token = f"plugin_{plugin_slug}_token"  # Generate a token for the plugin

            plugin_instance = await plugin_loader.load_plugin_with_sandbox(
                plugin_dir,
                plugin_token
            )

            # The loader returns a falsy value when loading failed.
            if plugin_instance:
                loading_results[plugin_slug] = True
                logger.info(f"Plugin {plugin_slug} loaded successfully")
            else:
                loading_results[plugin_slug] = False
                logger.warning(f"Failed to load plugin {plugin_slug}")

        except Exception as e:
            logger.error(f"Error loading plugin {plugin_slug}: {e}")
            loading_results[plugin_slug] = False

    successful_loads = sum(1 for success in loading_results.values() if success)
    logger.info(f"Plugin loading complete: {successful_loads}/{len(loading_results)} successful")

    return loading_results
|
||||||
|
|
||||||
|
async def auto_discover_and_register(self) -> Dict[str, Any]:
    """Complete auto-discovery workflow: scan, register, and load plugins.

    Runs three sequential steps -- directory scan, database registration,
    sandbox loading -- and accumulates per-step results.

    Returns:
        Dict with keys ``discovered`` (list of slugs), ``registered`` and
        ``loaded`` (slug -> bool maps), and a ``summary`` of counts. On
        failure an ``error`` key is added and the partial results returned;
        this method never raises.
    """
    logger.info("Starting plugin auto-discovery...")

    # Result skeleton; filled in step by step so partial progress survives
    # an exception in a later step.
    results = {
        "discovered": [],
        "registered": {},
        "loaded": {},
        "summary": {
            "total_discovered": 0,
            "successful_registrations": 0,
            "successful_loads": 0
        }
    }

    try:
        # Step 1: Scan directory for plugins
        discovered_plugins = await self.scan_plugins_directory()
        results["discovered"] = [p["slug"] for p in discovered_plugins]
        results["summary"]["total_discovered"] = len(discovered_plugins)

        # Nothing found: skip registration/loading entirely.
        if not discovered_plugins:
            logger.info("No plugins discovered")
            return results

        # Step 2: Register plugins in database
        registration_results = await self.register_discovered_plugins()
        results["registered"] = registration_results
        results["summary"]["successful_registrations"] = sum(1 for success in registration_results.values() if success)

        # Step 3: Load plugins into sandbox
        loading_results = await self.load_discovered_plugins()
        results["loaded"] = loading_results
        results["summary"]["successful_loads"] = sum(1 for success in loading_results.values() if success)

        logger.info(f"Auto-discovery complete! Discovered: {results['summary']['total_discovered']}, "
                    f"Registered: {results['summary']['successful_registrations']}, "
                    f"Loaded: {results['summary']['successful_loads']}")

        return results

    except Exception as e:
        logger.error(f"Auto-discovery failed: {e}")
        results["error"] = str(e)
        return results
|
||||||
|
|
||||||
|
def get_discovery_status(self) -> Dict[str, Any]:
    """Report a snapshot of the current auto-discovery state.

    Returns:
        Plain dict describing the configured plugins directory, whether it
        exists, the slugs discovered so far, their count, and a UTC
        ISO-8601 timestamp for when this snapshot was taken.
    """
    directory = self.plugins_dir
    known_slugs = list(self.discovered_plugins.keys())

    status: Dict[str, Any] = {}
    status["plugins_dir"] = str(directory)
    status["plugins_dir_exists"] = directory.exists()
    status["discovered_plugins"] = known_slugs
    status["discovery_count"] = len(self.discovered_plugins)
    status["last_scan"] = datetime.now(timezone.utc).isoformat()
    return status
|
||||||
|
|
||||||
|
|
||||||
|
# Global auto-discovery service instance
# Shared singleton; initialize_plugin_autodiscovery() and
# discover_plugins_now() below both operate on this object.
plugin_autodiscovery = PluginAutoDiscovery()
|
||||||
|
|
||||||
|
|
||||||
|
async def initialize_plugin_autodiscovery() -> Dict[str, Any]:
    """Run the global auto-discovery service once at application startup.

    Called from main.py. Never raises: any failure is folded into the
    returned dict under an "error" key together with an all-zero summary.
    """
    logger.info("Initializing plugin auto-discovery service...")

    try:
        outcome = await plugin_autodiscovery.auto_discover_and_register()
        logger.info("Plugin auto-discovery service initialized successfully")
        return outcome
    except Exception as e:
        logger.error(f"Plugin auto-discovery initialization failed: {e}")
        empty_summary = {"total_discovered": 0, "successful_registrations": 0, "successful_loads": 0}
        return {"error": str(e), "summary": empty_summary}
|
||||||
|
|
||||||
|
|
||||||
|
# Convenience function for manual plugin discovery
|
||||||
|
async def discover_plugins_now() -> Dict[str, Any]:
    """Manually trigger a full discovery run (for testing/debugging).

    Thin wrapper over the global service's auto_discover_and_register().
    """
    outcome = await plugin_autodiscovery.auto_discover_and_register()
    return outcome
|
||||||
472
backend/app/services/plugin_configuration_manager.py
Normal file
472
backend/app/services/plugin_configuration_manager.py
Normal file
@@ -0,0 +1,472 @@
|
|||||||
|
"""
|
||||||
|
Plugin Configuration Manager
|
||||||
|
Elegant, secure, and developer-friendly plugin configuration system
|
||||||
|
|
||||||
|
Design Principles:
|
||||||
|
1. Schemas embedded in plugin manifests (no hardcoding)
|
||||||
|
2. Automatic encryption for sensitive fields
|
||||||
|
3. Intelligent field type handling
|
||||||
|
4. Configuration resolution chain (defaults → user overrides)
|
||||||
|
5. Schema validation and caching
|
||||||
|
6. UUID-based operations throughout
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
from typing import Dict, Any, List, Optional, Union, Tuple
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
from cryptography.fernet import Fernet
|
||||||
|
from pydantic import BaseModel, ValidationError
|
||||||
|
import jsonschema
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.models.plugin import Plugin, PluginConfiguration
|
||||||
|
from app.utils.exceptions import PluginError
|
||||||
|
|
||||||
|
logger = get_logger("plugin.config.manager")
|
||||||
|
|
||||||
|
class ConfigurationField(BaseModel):
    """Represents a configuration field with type intelligence.

    Value object describing one entry of a plugin's configuration,
    including whether it is sensitive and whether its stored value is
    currently encrypted.
    """
    # Field name as it appears in the plugin's config schema.
    name: str
    # Current value; any JSON-serializable type.
    value: Any
    # JSON-schema type string ("string", "integer", "object", ...).
    field_type: str
    # Optional JSON-schema "format" hint (e.g. "password", "uri").
    format: Optional[str] = None
    # True when the field should be encrypted at rest.
    is_sensitive: bool = False
    # True when `value` currently holds ciphertext rather than plaintext.
    is_encrypted: bool = False
    # Extra JSON-schema validation constraints for this field.
    validation_rules: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
class ConfigurationResolver:
    """Resolves plugin configuration from layered sources.

    Precedence, lowest to highest: manifest schema defaults, the user's
    stored configuration, then runtime overrides supplied by the caller.
    """

    def __init__(self):
        self.logger = get_logger("plugin.config.resolver")

    def resolve_configuration(
        self,
        plugin_manifest: Dict[str, Any],
        user_config: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Merge the configuration layers into one dict.

        Starts from the defaults declared in the manifest's
        ``spec.config_schema`` and applies each non-empty layer on top,
        later layers winning on key collisions.
        """
        schema = plugin_manifest.get("spec", {}).get("config_schema", {})
        merged = self._extract_defaults_from_schema(schema)

        # Apply the higher-priority layers in order; empty/None layers are
        # skipped, matching a plain conditional update.
        for layer in (user_config, runtime_overrides):
            if layer:
                merged.update(layer)

        return merged

    def _extract_defaults_from_schema(self, schema: Dict[str, Any]) -> Dict[str, Any]:
        """Collect declared ``default`` values from a JSON schema.

        Recurses into ``object``-typed properties; a nested object only
        appears in the result when it contributed at least one default.
        """
        collected: Dict[str, Any] = {}

        for prop_name, prop_schema in schema.get("properties", {}).items():
            if "default" in prop_schema:
                collected[prop_name] = prop_schema["default"]
                continue
            if prop_schema.get("type") == "object":
                inner_defaults = self._extract_defaults_from_schema(prop_schema)
                if inner_defaults:
                    collected[prop_name] = inner_defaults

        return collected
|
||||||
|
|
||||||
|
class PluginEncryptionManager:
    """Handles encryption/decryption of sensitive configuration fields.

    Uses Fernet symmetric encryption. The key comes from
    ``settings.PLUGIN_ENCRYPTION_KEY`` when present; otherwise an
    ephemeral key is generated, which means previously encrypted values
    become unreadable after a process restart.
    """

    def __init__(self):
        self.logger = get_logger("plugin.encryption")
        self._encryption_key = self._get_or_generate_key()
        self._cipher = Fernet(self._encryption_key)

    def _get_or_generate_key(self) -> bytes:
        """Get existing encryption key or generate new one."""
        # In production, this should be stored securely (e.g., HashiCorp Vault)
        key_env = settings.PLUGIN_ENCRYPTION_KEY if hasattr(settings, 'PLUGIN_ENCRYPTION_KEY') else None

        if key_env:
            return key_env.encode()

        # Generate new key for development
        key = Fernet.generate_key()
        # (fixed: was a pointless f-string with no placeholders)
        self.logger.warning(
            "Generated new encryption key for plugin configurations. "
            "For production, set PLUGIN_ENCRYPTION_KEY environment variable"
        )
        return key

    def encrypt_value(self, value: str) -> str:
        """Encrypt a sensitive configuration value.

        Raises:
            PluginError: if encryption fails for any reason.
        """
        try:
            encrypted = self._cipher.encrypt(value.encode())
            return encrypted.decode()
        except Exception as e:
            self.logger.error(f"Encryption failed: {e}")
            raise PluginError(f"Failed to encrypt configuration value: {e}")

    def decrypt_value(self, encrypted_value: str) -> str:
        """Decrypt a sensitive configuration value.

        Raises:
            PluginError: if decryption fails (wrong key, corrupt token, ...).
        """
        try:
            decrypted = self._cipher.decrypt(encrypted_value.encode())
            return decrypted.decode()
        except Exception as e:
            self.logger.error(f"Decryption failed: {e}")
            raise PluginError(f"Failed to decrypt configuration value: {e}")

    def identify_sensitive_fields(self, schema: Dict[str, Any]) -> List[str]:
        """Identify sensitive fields in schema that should be encrypted.

        A field is sensitive when its JSON-schema ``format`` is a known
        sensitive format OR its name contains a sensitive keyword. Nested
        object fields are reported with dotted paths ("parent.child").

        Bug fix: the original appended a field twice when both the format
        and the name matched; each field now appears at most once.
        """
        sensitive_fields: List[str] = []
        properties = schema.get("properties", {})

        for field_name, field_schema in properties.items():
            # Check for explicit sensitive formats
            format_type = field_schema.get("format", "")
            by_format = format_type in ["password", "secret", "token", "key"]
            # Check for sensitive field names
            by_name = any(keyword in field_name.lower() for keyword in
                          ["password", "secret", "token", "key", "credential", "private"])
            if by_format or by_name:
                sensitive_fields.append(field_name)

            # Recursively check nested objects
            if field_schema.get("type") == "object":
                nested_sensitive = self.identify_sensitive_fields(field_schema)
                sensitive_fields.extend(f"{field_name}.{nested}" for nested in nested_sensitive)

        return sensitive_fields
|
||||||
|
|
||||||
|
class PluginSchemaManager:
    """Manages plugin configuration schemas with caching and validation.

    Responsibilities:
      * fetch a plugin's ``config_schema`` from its stored manifest,
        with a 5-minute in-memory cache keyed by plugin UUID;
      * validate configuration payloads against that JSON schema;
      * split configuration into non-sensitive and encrypted-sensitive
        parts via PluginEncryptionManager, and recombine/decrypt them.
    """

    def __init__(self):
        self.logger = get_logger("plugin.schema.manager")
        # plugin UUID (as str) -> cached config_schema dict
        self._schema_cache: Dict[str, Dict[str, Any]] = {}
        # plugin UUID (as str) -> time the cache entry was stored
        self._cache_timestamps: Dict[str, datetime] = {}
        self.resolver = ConfigurationResolver()
        self.encryption = PluginEncryptionManager()

    async def get_plugin_schema(self, plugin_id: Union[str, uuid.UUID], db: AsyncSession) -> Optional[Dict[str, Any]]:
        """Get configuration schema for plugin (with caching).

        Looks up the Plugin row by UUID and returns
        ``manifest_data["spec"]["config_schema"]``. Returns None when the
        plugin, its manifest, or the schema section is missing.
        """
        plugin_uuid = self._ensure_uuid(plugin_id)
        cache_key = str(plugin_uuid)

        # Check cache first
        if cache_key in self._schema_cache:
            cache_time = self._cache_timestamps.get(cache_key)
            if cache_time and (datetime.now() - cache_time).total_seconds() < 300:  # 5 min cache
                return self._schema_cache[cache_key]

        # Load from database
        stmt = select(Plugin).where(Plugin.id == plugin_uuid)
        result = await db.execute(stmt)
        plugin = result.scalar_one_or_none()

        if not plugin:
            self.logger.warning(f"Plugin not found: {plugin_id}")
            return None

        # Extract schema from manifest
        manifest_data = plugin.manifest_data
        if not manifest_data:
            self.logger.warning(f"No manifest data for plugin {plugin.slug}")
            return None

        schema = manifest_data.get("spec", {}).get("config_schema")
        if not schema:
            self.logger.warning(f"No config_schema in manifest for plugin {plugin.slug}")
            return None

        # Cache the schema
        # NOTE(review): naive datetime.now() here, while other services in
        # this codebase use datetime.now(timezone.utc) -- internally
        # consistent for cache aging, but worth confirming the convention.
        self._schema_cache[cache_key] = schema
        self._cache_timestamps[cache_key] = datetime.now()

        return schema

    async def validate_configuration(
        self,
        plugin_id: Union[str, uuid.UUID],
        config_data: Dict[str, Any],
        db: AsyncSession
    ) -> Tuple[bool, List[str]]:
        """Validate configuration against the plugin's JSON schema.

        Returns:
            (is_valid, errors) -- errors is an empty list on success, or a
            single-element list with the validation/error message otherwise.
        """
        schema = await self.get_plugin_schema(plugin_id, db)
        if not schema:
            return False, ["No configuration schema available for plugin"]

        try:
            jsonschema.validate(config_data, schema)
            return True, []
        except jsonschema.ValidationError as e:
            return False, [str(e)]
        except Exception as e:
            # e.g. a malformed schema raises SchemaError, caught here.
            self.logger.error(f"Schema validation error: {e}")
            return False, [f"Validation failed: {e}"]

    async def process_configuration_fields(
        self,
        plugin_id: Union[str, uuid.UUID],
        config_data: Dict[str, Any],
        db: AsyncSession,
        encrypt_sensitive: bool = True
    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """
        Process configuration fields, separating sensitive from non-sensitive data.
        Returns (non_sensitive_data, encrypted_sensitive_data)

        Nested sensitive fields are stored in the encrypted dict under dotted
        keys ("parent.child"). When no schema is available the input is
        returned unchanged with an empty encrypted portion.
        """
        schema = await self.get_plugin_schema(plugin_id, db)
        if not schema:
            return config_data, {}

        sensitive_fields = self.encryption.identify_sensitive_fields(schema)
        non_sensitive = {}
        encrypted_sensitive = {}

        # Process top-level and nested fields
        for key, value in config_data.items():
            if key in sensitive_fields and encrypt_sensitive:
                # Top-level sensitive field -- values are stringified before
                # encryption, so non-string types come back as strings.
                encrypted_value = self.encryption.encrypt_value(str(value))
                encrypted_sensitive[key] = encrypted_value
            elif isinstance(value, dict):
                # Process nested object
                nested_sensitive, nested_encrypted = self._process_nested_fields(
                    value, key, sensitive_fields, encrypt_sensitive
                )
                if nested_encrypted:
                    # Store nested encrypted fields with dot notation
                    for nested_key, encrypted_val in nested_encrypted.items():
                        encrypted_sensitive[f"{key}.{nested_key}"] = encrypted_val
                    # Store the non-sensitive parts of the nested object
                    if nested_sensitive:
                        non_sensitive[key] = nested_sensitive
                else:
                    # No sensitive fields in this nested object
                    non_sensitive[key] = value
            else:
                non_sensitive[key] = value

        return non_sensitive, encrypted_sensitive

    def _process_nested_fields(
        self,
        nested_data: Dict[str, Any],
        parent_key: str,
        sensitive_fields: List[str],
        encrypt_sensitive: bool
    ) -> Tuple[Dict[str, Any], Dict[str, str]]:
        """Split one nested object into plain and encrypted fields.

        Sensitivity is decided by matching "parent.child" against the
        dotted paths in ``sensitive_fields``. Only one nesting level is
        handled here (deeper dicts stay in the non-sensitive part).
        """
        nested_non_sensitive = {}
        nested_encrypted = {}

        for nested_key, nested_value in nested_data.items():
            full_field_path = f"{parent_key}.{nested_key}"
            if full_field_path in sensitive_fields and encrypt_sensitive:
                # This nested field is sensitive - encrypt it
                encrypted_value = self.encryption.encrypt_value(str(nested_value))
                nested_encrypted[nested_key] = encrypted_value
            else:
                # This nested field is not sensitive
                nested_non_sensitive[nested_key] = nested_value

        return nested_non_sensitive, nested_encrypted

    def decrypt_configuration(
        self,
        non_sensitive_data: Dict[str, Any],
        encrypted_sensitive_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Combine and decrypt configuration for plugin use.

        Dotted keys in the encrypted dict are folded back into nested
        dicts. A field that fails to decrypt is logged and skipped; the
        remaining fields are still returned.
        """
        decrypted_config = non_sensitive_data.copy()

        for key, encrypted_value in encrypted_sensitive_data.items():
            try:
                decrypted_value = self.encryption.decrypt_value(encrypted_value)

                if "." in key:
                    # Handle nested fields with dot notation
                    parent_key, nested_key = key.split(".", 1)
                    if parent_key not in decrypted_config:
                        decrypted_config[parent_key] = {}
                    if isinstance(decrypted_config[parent_key], dict):
                        decrypted_config[parent_key][nested_key] = decrypted_value
                    else:
                        self.logger.warning(f"Cannot set nested field {key} - parent is not dict")
                else:
                    # Top-level field
                    decrypted_config[key] = decrypted_value
            except Exception as e:
                self.logger.error(f"Failed to decrypt field {key}: {e}")
                # Continue with other fields, log error

        return decrypted_config

    def _ensure_uuid(self, plugin_id: Union[str, uuid.UUID]) -> uuid.UUID:
        """Coerce a plugin identifier to uuid.UUID.

        Raises:
            PluginError: when the string is not a valid UUID.
        """
        if isinstance(plugin_id, uuid.UUID):
            return plugin_id

        try:
            return uuid.UUID(plugin_id)
        except ValueError:
            raise PluginError(f"Invalid plugin ID format: {plugin_id}")
|
||||||
|
|
||||||
|
class PluginConfigurationManager:
    """Main configuration manager that orchestrates all operations.

    Facade over PluginSchemaManager (schemas, validation, encryption) and
    ConfigurationResolver (layered merging) for saving, loading, and
    resolving per-user plugin configurations.
    """

    def __init__(self):
        self.logger = get_logger("plugin.config.manager")
        self.schema_manager = PluginSchemaManager()
        self.resolver = ConfigurationResolver()

    async def get_plugin_configuration_schema(
        self,
        plugin_id: Union[str, uuid.UUID],
        db: AsyncSession
    ) -> Optional[Dict[str, Any]]:
        """Get configuration schema for plugin (delegates to schema manager)."""
        return await self.schema_manager.get_plugin_schema(plugin_id, db)

    async def save_plugin_configuration(
        self,
        plugin_id: Union[str, uuid.UUID],
        user_id: int,
        config_data: Dict[str, Any],
        config_name: str = "Default Configuration",
        config_description: Optional[str] = None,
        db: AsyncSession = None
    ) -> PluginConfiguration:
        """Save plugin configuration with automatic encryption of sensitive fields.

        Validates ``config_data`` against the plugin's schema, encrypts the
        sensitive fields, then updates the user's active configuration row
        or creates a new one.

        Raises:
            PluginError: when schema validation fails.
        """
        # NOTE(review): db defaults to None but is used unconditionally
        # below -- calling without a session will fail; consider making it
        # a required parameter.

        # Validate configuration against schema
        is_valid, errors = await self.schema_manager.validate_configuration(plugin_id, config_data, db)
        if not is_valid:
            raise PluginError(f"Configuration validation failed: {', '.join(errors)}")

        # Process fields (separate sensitive from non-sensitive)
        non_sensitive, encrypted_sensitive = await self.schema_manager.process_configuration_fields(
            plugin_id, config_data, db, encrypt_sensitive=True
        )

        # Check for existing configuration
        plugin_uuid = self.schema_manager._ensure_uuid(plugin_id)
        stmt = select(PluginConfiguration).where(
            PluginConfiguration.plugin_id == plugin_uuid,
            PluginConfiguration.user_id == user_id,
            PluginConfiguration.is_active == True
        )
        result = await db.execute(stmt)
        existing_config = result.scalar_one_or_none()

        if existing_config:
            # Update existing configuration in place; name is kept as-is.
            existing_config.config_data = non_sensitive
            existing_config.encrypted_data = json.dumps(encrypted_sensitive) if encrypted_sensitive else None
            # NOTE(review): naive datetime.now() -- other models here use
            # timezone-aware timestamps; confirm the column's expectation.
            existing_config.updated_at = datetime.now()
            existing_config.description = config_description or existing_config.description
        else:
            # Create new configuration
            config = PluginConfiguration(
                id=uuid.uuid4(),
                plugin_id=plugin_uuid,
                user_id=user_id,
                name=config_name,
                description=config_description,
                config_data=non_sensitive,
                encrypted_data=json.dumps(encrypted_sensitive) if encrypted_sensitive else None,
                is_active=True,
                is_default=True,  # First config is default
                created_by_user_id=user_id
            )
            db.add(config)
            existing_config = config

        await db.commit()
        return existing_config

    async def get_plugin_configuration(
        self,
        plugin_id: Union[str, uuid.UUID],
        user_id: int,
        db: AsyncSession,
        decrypt_sensitive: bool = True
    ) -> Optional[Dict[str, Any]]:
        """Get plugin configuration for user with automatic decryption.

        Returns None when the user has no active configuration. On a
        decryption failure, falls back to the non-sensitive data only.
        """

        plugin_uuid = self.schema_manager._ensure_uuid(plugin_id)
        stmt = select(PluginConfiguration).where(
            PluginConfiguration.plugin_id == plugin_uuid,
            PluginConfiguration.user_id == user_id,
            PluginConfiguration.is_active == True
        )
        result = await db.execute(stmt)
        config = result.scalar_one_or_none()

        if not config:
            return None

        # Get non-sensitive data
        config_data = config.config_data or {}

        # Decrypt sensitive data if requested
        if decrypt_sensitive and config.encrypted_data:
            try:
                encrypted_data = json.loads(config.encrypted_data)
                decrypted_config = self.schema_manager.decrypt_configuration(config_data, encrypted_data)
                return decrypted_config
            except Exception as e:
                self.logger.error(f"Failed to decrypt configuration: {e}")
                # Return non-sensitive data only
                return config_data

        return config_data

    async def get_resolved_configuration(
        self,
        plugin_id: Union[str, uuid.UUID],
        user_id: int,
        db: AsyncSession,
        runtime_overrides: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Get fully resolved configuration (defaults + user config + overrides).

        Raises:
            PluginError: when the plugin row does not exist.
        """

        # Get plugin manifest for defaults
        plugin_uuid = self.schema_manager._ensure_uuid(plugin_id)
        stmt = select(Plugin).where(Plugin.id == plugin_uuid)
        result = await db.execute(stmt)
        plugin = result.scalar_one_or_none()

        if not plugin:
            raise PluginError(f"Plugin not found: {plugin_id}")

        # Get user configuration
        user_config = await self.get_plugin_configuration(plugin_id, user_id, db, decrypt_sensitive=True)

        # Resolve configuration chain
        resolved = self.resolver.resolve_configuration(
            plugin_manifest=plugin.manifest_data,
            user_config=user_config,
            runtime_overrides=runtime_overrides
        )

        return resolved
|
||||||
|
|
||||||
|
# Global instance
# Module-level singleton used by the rest of the application for all
# plugin-configuration operations.
plugin_config_manager = PluginConfigurationManager()
|
||||||
865
backend/app/services/plugin_database.py
Normal file
865
backend/app/services/plugin_database.py
Normal file
@@ -0,0 +1,865 @@
|
|||||||
|
"""
|
||||||
|
Plugin Database Isolation Infrastructure
|
||||||
|
Provides isolated database schemas and secure database access for plugins
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import hashlib
|
||||||
|
import concurrent.futures
|
||||||
|
import time
|
||||||
|
from typing import Dict, Any, List, Optional, AsyncGenerator
|
||||||
|
from sqlalchemy import create_engine, text, MetaData, inspect
|
||||||
|
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
||||||
|
from sqlalchemy.orm import sessionmaker, Session
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError, ProgrammingError
|
||||||
|
from alembic import command
|
||||||
|
from alembic.config import Config
|
||||||
|
from alembic.migration import MigrationContext
|
||||||
|
from alembic.operations import Operations
|
||||||
|
import tempfile
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.models.plugin import Plugin, PluginConfiguration
|
||||||
|
from app.db.database import get_db
|
||||||
|
from app.utils.exceptions import PluginError, DatabaseError
|
||||||
|
|
||||||
|
|
||||||
|
logger = get_logger("plugin.database")
|
||||||
|
|
||||||
|
|
||||||
|
class PluginDatabaseManager:
|
||||||
|
"""Manages isolated database schemas for plugins"""
|
||||||
|
|
||||||
|
def __init__(self):
    """Initialize empty per-plugin connection registries."""
    # plugin id -> async engine bound to that plugin's schema
    self.plugin_engines: Dict[str, Any] = {}
    # plugin id -> async session factory for that plugin's schema
    self.plugin_sessions: Dict[str, Any] = {}
    # plugin id -> True once its schema has been set up in this process
    self.schema_cache: Dict[str, bool] = {}
|
||||||
|
|
||||||
|
async def create_plugin_schema(self, plugin_id: str, manifest_data: Dict[str, Any]) -> bool:
    """Create isolated database schema for plugin.

    Creates (if needed) a schema named ``plugin_<plugin_id>``, opens a
    dedicated engine/session pair scoped to it, and runs the plugin's
    migrations when the manifest's database spec enables ``auto_migrate``
    (the default).

    Raises:
        PluginError: on validation failure or any error during setup
        (including the validation error, which is re-wrapped by the
        except clause below).
    """
    try:
        schema_name = f"plugin_{plugin_id}"

        # Validate schema name
        # Reject anything that is not "plugin_" + alphanumerics/underscores
        # within PostgreSQL's identifier limit before it reaches DDL.
        if not self._validate_schema_name(schema_name):
            raise PluginError(f"Invalid schema name: {schema_name}")

        # Create schema if it doesn't exist
        await self._create_schema_if_not_exists(schema_name)

        # Create plugin-specific engine and session
        await self._create_plugin_database_connection(plugin_id, schema_name)

        # Run migrations if specified
        database_spec = manifest_data.get("spec", {}).get("database")
        if database_spec and database_spec.get("auto_migrate", True):
            await self._run_plugin_migrations(plugin_id, database_spec)

        self.schema_cache[plugin_id] = True
        logger.info(f"Created database schema for plugin {plugin_id}: {schema_name}")

    return True

    except Exception as e:
        logger.error(f"Failed to create plugin schema for {plugin_id}: {e}")
        raise PluginError(f"Database schema creation failed: {e}")
|
||||||
|
|
||||||
|
async def delete_plugin_schema(self, plugin_id: str) -> bool:
    """Delete plugin database schema (DANGEROUS - used for uninstall).

    Drops ``plugin_<plugin_id>`` with CASCADE, destroying all plugin data.

    Returns:
        True on success, False on any failure (never raises).
    """
    try:
        schema_name = f"plugin_{plugin_id}"

        # Close connections first
        # Presumably disposes the plugin's engine/sessions so the DROP is
        # not blocked by open connections -- implementation not shown here.
        await self._close_plugin_connections(plugin_id)

        # Drop schema and all its contents
        await self._drop_schema(schema_name)

        # Clean up cache
        if plugin_id in self.schema_cache:
            del self.schema_cache[plugin_id]

        logger.warning(f"Deleted database schema for plugin {plugin_id}: {schema_name}")
        return True

    except Exception as e:
        logger.error(f"Failed to delete plugin schema for {plugin_id}: {e}")
        return False
|
||||||
|
|
||||||
|
async def get_plugin_session(self, plugin_id: str) -> Optional[AsyncSession]:
    """Create and return a fresh database session for the given plugin.

    Returns None (after logging an error) when no session factory has
    been registered for the plugin; otherwise invokes the stored factory.
    """
    if plugin_id not in self.plugin_sessions:
        logger.error(f"No database session for plugin {plugin_id}")
        return None

    make_session = self.plugin_sessions[plugin_id]
    return make_session()
|
||||||
|
|
||||||
|
async def get_plugin_engine(self, plugin_id):
    """Return the async engine registered for *plugin_id*, or None."""
    engines = self.plugin_engines
    if plugin_id in engines:
        return engines[plugin_id]
    return None
|
||||||
|
|
||||||
|
def _validate_schema_name(self, schema_name: str) -> bool:
|
||||||
|
"""Validate schema name for security"""
|
||||||
|
if not schema_name.startswith("plugin_"):
|
||||||
|
return False
|
||||||
|
|
||||||
|
plugin_part = schema_name[7:] # Remove "plugin_" prefix
|
||||||
|
|
||||||
|
# Only allow alphanumeric and underscores
|
||||||
|
if not plugin_part.replace("_", "").isalnum():
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check length
|
||||||
|
if len(schema_name) > 63: # PostgreSQL limit
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def _create_schema_if_not_exists(self, schema_name: str):
    """Create database schema if it doesn't exist.

    Uses a short-lived synchronous SQLAlchemy connection. The f-string
    interpolation of ``schema_name`` into DDL below is acceptable only
    because callers have already passed it through _validate_schema_name.

    Raises:
        DatabaseError: if the existence check or CREATE SCHEMA fails.
    """
    # Use synchronous database connection
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy import create_engine

    # NOTE(review): a new engine is created per call and never disposed;
    # consider engine.dispose() in the finally block or reusing one engine.
    engine = create_engine(settings.DATABASE_URL)
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    db = SessionLocal()

    try:
        # Check if schema exists
        result = db.execute(
            text("SELECT schema_name FROM information_schema.schemata WHERE schema_name = :schema_name"),
            {"schema_name": schema_name}
        )

        if result.fetchone():
            logger.debug(f"Schema {schema_name} already exists")
            return

        # Create schema
        db.execute(text(f'CREATE SCHEMA IF NOT EXISTS "{schema_name}"'))
        db.commit()

        logger.info(f"Created database schema: {schema_name}")

    except Exception as e:
        db.rollback()
        raise DatabaseError(f"Failed to create schema {schema_name}: {e}")
    finally:
        db.close()
|
||||||
|
|
||||||
|
async def _drop_schema(self, schema_name: str):
|
||||||
|
"""Drop database schema and all its contents"""
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
|
||||||
|
engine = create_engine(settings.DATABASE_URL)
|
||||||
|
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||||
|
db = SessionLocal()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Drop schema with CASCADE to remove all objects
|
||||||
|
db.execute(text(f'DROP SCHEMA IF EXISTS "{schema_name}" CASCADE'))
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
logger.warning(f"Dropped database schema: {schema_name}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
db.rollback()
|
||||||
|
raise DatabaseError(f"Failed to drop schema {schema_name}: {e}")
|
||||||
|
finally:
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
async def _create_plugin_database_connection(self, plugin_id: str, schema_name: str):
|
||||||
|
"""Create database engine and session for plugin"""
|
||||||
|
try:
|
||||||
|
# Create engine with schema-specific connection
|
||||||
|
database_url = settings.DATABASE_URL
|
||||||
|
|
||||||
|
# For PostgreSQL, set search_path to plugin schema
|
||||||
|
if database_url.startswith("postgresql"):
|
||||||
|
plugin_url = f"{database_url}?options=-csearch_path%3D{schema_name}"
|
||||||
|
else:
|
||||||
|
# For other databases, might need different approach
|
||||||
|
plugin_url = database_url
|
||||||
|
|
||||||
|
# Create async engine
|
||||||
|
engine = create_async_engine(
|
||||||
|
plugin_url,
|
||||||
|
echo=False,
|
||||||
|
pool_pre_ping=True,
|
||||||
|
pool_recycle=3600,
|
||||||
|
pool_size=5,
|
||||||
|
max_overflow=10
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create session factory
|
||||||
|
async_session = async_sessionmaker(
|
||||||
|
engine,
|
||||||
|
class_=AsyncSession,
|
||||||
|
expire_on_commit=False
|
||||||
|
)
|
||||||
|
|
||||||
|
# Store engine and session
|
||||||
|
self.plugin_engines[plugin_id] = engine
|
||||||
|
self.plugin_sessions[plugin_id] = async_session
|
||||||
|
|
||||||
|
logger.debug(f"Created database connection for plugin {plugin_id}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise DatabaseError(f"Failed to create database connection for plugin {plugin_id}: {e}")
|
||||||
|
|
||||||
|
async def _close_plugin_connections(self, plugin_id: str):
|
||||||
|
"""Close database connections for plugin"""
|
||||||
|
try:
|
||||||
|
if plugin_id in self.plugin_engines:
|
||||||
|
await self.plugin_engines[plugin_id].dispose()
|
||||||
|
del self.plugin_engines[plugin_id]
|
||||||
|
|
||||||
|
if plugin_id in self.plugin_sessions:
|
||||||
|
del self.plugin_sessions[plugin_id]
|
||||||
|
|
||||||
|
logger.debug(f"Closed database connections for plugin {plugin_id}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error closing connections for plugin {plugin_id}: {e}")
|
||||||
|
|
||||||
|
async def _run_plugin_migrations(self, plugin_id: str, database_spec: Dict[str, Any]):
|
||||||
|
"""Run database migrations for plugin"""
|
||||||
|
try:
|
||||||
|
migrations_path = database_spec.get("migrations_path", "./migrations")
|
||||||
|
|
||||||
|
# Use migration manager to run migrations
|
||||||
|
migration_manager = PluginMigrationManager(self)
|
||||||
|
success = await migration_manager.run_plugin_migrations(plugin_id, Path(migrations_path).parent)
|
||||||
|
|
||||||
|
if not success:
|
||||||
|
raise PluginError(f"Migration execution failed for plugin {plugin_id}")
|
||||||
|
|
||||||
|
logger.info(f"Successfully ran migrations for plugin {plugin_id} from {migrations_path}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to run migrations for plugin {plugin_id}: {e}")
|
||||||
|
raise PluginError(f"Migration failed: {e}")
|
||||||
|
|
||||||
|
    async def backup_plugin_data(self, plugin_id: str) -> Optional[str]:
        """Create an encrypted backup of the plugin's schema data.

        Dumps the ``plugin_<id>`` schema with ``pg_dump`` (data only),
        encrypts the dump with the plugin token manager's cipher, deletes the
        plaintext dump, and prunes old backups (keeping the newest five).

        Returns:
            Path of the encrypted backup file as a string, or None on any
            failure (errors are logged, never raised).
        """
        try:
            schema_name = f"plugin_{plugin_id}"

            # Ensure the backup directory exists (created with default perms;
            # NOTE(review): consider tightening permissions — confirm policy).
            backup_dir = Path("/data/plugin_backups")
            backup_dir.mkdir(parents=True, exist_ok=True)

            # Timestamped file names: plaintext dump, then its encrypted form.
            timestamp = int(time.time())
            backup_file = backup_dir / f"plugin_{plugin_id}_backup_{timestamp}.sql"
            encrypted_backup_file = backup_dir / f"plugin_{plugin_id}_backup_{timestamp}.sql.enc"

            # pg_dump is invoked as an external process.
            import subprocess

            # Only PostgreSQL URLs are supported for backup.
            db_url = settings.DATABASE_URL
            if db_url.startswith("postgresql://"):
                # Extract connection details from the URL.
                import urllib.parse
                parsed = urllib.parse.urlparse(db_url)

                # Build pg_dump command restricted to this plugin's schema.
                pg_dump_cmd = [
                    "pg_dump",
                    "-h", parsed.hostname or "localhost",
                    "-p", str(parsed.port or 5432),
                    "-U", parsed.username,
                    "-d", parsed.path.lstrip('/'),
                    "-n", schema_name,  # Only backup this schema
                    "--data-only",  # Only data, not structure
                    "-f", str(backup_file)
                ]

                # Pass the password via PGPASSWORD instead of the command line.
                env = os.environ.copy()
                if parsed.password:
                    env["PGPASSWORD"] = parsed.password

                # Execute pg_dump with a hard timeout.
                result = subprocess.run(
                    pg_dump_cmd,
                    env=env,
                    capture_output=True,
                    text=True,
                    timeout=300  # 5 minute timeout
                )

                if result.returncode != 0:
                    logger.error(f"pg_dump failed: {result.stderr}")
                    return None

                # Encrypt the dump. NOTE(review): the plaintext dump exists on
                # disk until unlinked below; on a crash in between it remains.
                from app.services.plugin_security import plugin_token_manager

                with open(backup_file, 'rb') as f:
                    backup_data = f.read()

                encrypted_data = plugin_token_manager.cipher_suite.encrypt(backup_data)

                with open(encrypted_backup_file, 'wb') as f:
                    f.write(encrypted_data)

                # Remove the unencrypted dump now that the encrypted copy exists.
                backup_file.unlink()

                # Prune older backups, keeping the most recent five.
                await self._cleanup_old_backups(plugin_id, backup_dir)

                logger.info(f"Backup created for plugin {plugin_id}: {encrypted_backup_file}")
                return str(encrypted_backup_file)

            else:
                logger.error(f"Unsupported database type for backup: {db_url}")
                return None

        except Exception as e:
            logger.error(f"Failed to backup plugin data for {plugin_id}: {e}")
            return None
    async def restore_plugin_data(self, plugin_id: str, backup_file: str) -> bool:
        """Restore the plugin's schema data from an encrypted backup.

        DESTRUCTIVE: drops the existing ``plugin_<id>`` schema, recreates it
        empty, then replays the decrypted SQL dump with ``psql``.

        Args:
            plugin_id: Plugin whose schema is restored.
            backup_file: Path to a ``.sql.enc`` file produced by
                ``backup_plugin_data``.

        Returns:
            True on success (an empty restored schema counts as success),
            False on any failure; errors are logged, never raised.
        """
        try:
            schema_name = f"plugin_{plugin_id}"
            backup_path = Path(backup_file)

            # The backup must exist on disk.
            if not backup_path.exists():
                logger.error(f"Backup file not found: {backup_file}")
                return False

            # Only encrypted backups are accepted.
            if not backup_path.name.endswith('.sql.enc'):
                logger.error(f"Backup file must be encrypted (.sql.enc): {backup_file}")
                return False

            # Decrypt into a temporary plaintext SQL file for psql.
            from app.services.plugin_security import plugin_token_manager

            try:
                with open(backup_path, 'rb') as f:
                    encrypted_data = f.read()

                decrypted_data = plugin_token_manager.cipher_suite.decrypt(encrypted_data)

                # Temporary file lives next to the backup; removed in finally.
                temp_backup = backup_path.parent / f"temp_restore_{plugin_id}_{int(time.time())}.sql"
                with open(temp_backup, 'wb') as f:
                    f.write(decrypted_data)

            except Exception as e:
                logger.error(f"Failed to decrypt backup file: {e}")
                return False

            try:
                # Drop existing schema (WARNING: destructive operation).
                # NOTE(review): the drop happens before the backend check
                # below — on a non-PostgreSQL URL the schema is lost anyway.
                await self._drop_schema(schema_name)

                # Recreate an empty schema to restore into.
                await self._create_schema_if_not_exists(schema_name)

                # Replay the dump using psql.
                import subprocess
                import urllib.parse

                db_url = settings.DATABASE_URL
                if db_url.startswith("postgresql://"):
                    parsed = urllib.parse.urlparse(db_url)

                    # Build psql command pointing at the decrypted dump.
                    psql_cmd = [
                        "psql",
                        "-h", parsed.hostname or "localhost",
                        "-p", str(parsed.port or 5432),
                        "-U", parsed.username,
                        "-d", parsed.path.lstrip('/'),
                        "-f", str(temp_backup)
                    ]

                    # Pass the password via PGPASSWORD, not the command line.
                    env = os.environ.copy()
                    if parsed.password:
                        env["PGPASSWORD"] = parsed.password

                    # Execute the restore with a generous timeout.
                    result = subprocess.run(
                        psql_cmd,
                        env=env,
                        capture_output=True,
                        text=True,
                        timeout=600  # 10 minute timeout
                    )

                    if result.returncode != 0:
                        logger.error(f"psql restore failed: {result.stderr}")
                        return False

                    # Sanity check: count tables in the restored schema.
                    stats = await self.get_plugin_database_stats(plugin_id)
                    if stats.get('table_count', 0) > 0:
                        logger.info(f"Restore completed for plugin {plugin_id}. Tables: {stats['table_count']}")
                        return True
                    else:
                        logger.warning(f"Restore completed but no tables found for plugin {plugin_id}")
                        return True  # Empty schema is valid

                else:
                    logger.error(f"Unsupported database type for restore: {db_url}")
                    return False

            finally:
                # Always remove the decrypted temporary file.
                temp_backup.unlink(missing_ok=True)

        except Exception as e:
            logger.error(f"Failed to restore plugin data for {plugin_id}: {e}")
            return False
async def _cleanup_old_backups(self, plugin_id: str, backup_dir: Path, keep_count: int = 5):
|
||||||
|
"""Clean up old backup files, keeping only the most recent ones"""
|
||||||
|
try:
|
||||||
|
# Find all backup files for this plugin
|
||||||
|
backup_pattern = f"plugin_{plugin_id}_backup_*.sql.enc"
|
||||||
|
backup_files = list(backup_dir.glob(backup_pattern))
|
||||||
|
|
||||||
|
if len(backup_files) <= keep_count:
|
||||||
|
return # No cleanup needed
|
||||||
|
|
||||||
|
# Sort by creation time (newest first)
|
||||||
|
backup_files.sort(key=lambda f: f.stat().st_mtime, reverse=True)
|
||||||
|
|
||||||
|
# Remove oldest backups
|
||||||
|
files_to_remove = backup_files[keep_count:]
|
||||||
|
for old_backup in files_to_remove:
|
||||||
|
try:
|
||||||
|
old_backup.unlink()
|
||||||
|
logger.debug(f"Removed old backup: {old_backup.name}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to remove old backup {old_backup.name}: {e}")
|
||||||
|
|
||||||
|
logger.info(f"Cleaned up {len(files_to_remove)} old backups for plugin {plugin_id}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to cleanup old backups for plugin {plugin_id}: {e}")
|
||||||
|
|
||||||
|
async def list_plugin_backups(self, plugin_id: str) -> List[Dict[str, Any]]:
|
||||||
|
"""List available backups for a plugin"""
|
||||||
|
try:
|
||||||
|
backup_dir = Path("/data/plugin_backups")
|
||||||
|
if not backup_dir.exists():
|
||||||
|
return []
|
||||||
|
|
||||||
|
# Find all backup files for this plugin
|
||||||
|
backup_pattern = f"plugin_{plugin_id}_backup_*.sql.enc"
|
||||||
|
backup_files = list(backup_dir.glob(backup_pattern))
|
||||||
|
|
||||||
|
backups = []
|
||||||
|
for backup_file in backup_files:
|
||||||
|
try:
|
||||||
|
# Extract timestamp from filename
|
||||||
|
filename = backup_file.stem # Remove .enc extension
|
||||||
|
timestamp_str = filename.split('_')[-1]
|
||||||
|
backup_timestamp = int(timestamp_str)
|
||||||
|
|
||||||
|
stat = backup_file.stat()
|
||||||
|
|
||||||
|
backups.append({
|
||||||
|
"file_path": str(backup_file),
|
||||||
|
"filename": backup_file.name,
|
||||||
|
"timestamp": backup_timestamp,
|
||||||
|
"created_at": datetime.fromtimestamp(backup_timestamp, tz=timezone.utc).isoformat(),
|
||||||
|
"size_bytes": stat.st_size,
|
||||||
|
"size_mb": round(stat.st_size / (1024 * 1024), 2)
|
||||||
|
})
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to process backup file {backup_file.name}: {e}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Sort by timestamp (newest first)
|
||||||
|
backups.sort(key=lambda b: b['timestamp'], reverse=True)
|
||||||
|
|
||||||
|
return backups
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to list backups for plugin {plugin_id}: {e}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
async def get_plugin_database_stats(self, plugin_id: str) -> Dict[str, Any]:
|
||||||
|
"""Get database statistics for plugin"""
|
||||||
|
try:
|
||||||
|
schema_name = f"plugin_{plugin_id}"
|
||||||
|
|
||||||
|
# Use synchronous database connection for stats
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
|
||||||
|
engine = create_engine(settings.DATABASE_URL)
|
||||||
|
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
||||||
|
db = SessionLocal()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get table count
|
||||||
|
result = db.execute(
|
||||||
|
text("""
|
||||||
|
SELECT COUNT(*) as table_count
|
||||||
|
FROM information_schema.tables
|
||||||
|
WHERE table_schema = :schema_name
|
||||||
|
"""),
|
||||||
|
{"schema_name": schema_name}
|
||||||
|
)
|
||||||
|
table_count = result.fetchone()[0]
|
||||||
|
|
||||||
|
# Get schema size (PostgreSQL specific)
|
||||||
|
result = db.execute(
|
||||||
|
text("""
|
||||||
|
SELECT COALESCE(SUM(pg_total_relation_size(c.oid)), 0) as total_size
|
||||||
|
FROM pg_class c
|
||||||
|
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||||
|
WHERE n.nspname = :schema_name
|
||||||
|
"""),
|
||||||
|
{"schema_name": schema_name}
|
||||||
|
)
|
||||||
|
|
||||||
|
size_bytes = result.fetchone()[0] or 0
|
||||||
|
total_size = f"{size_bytes} bytes"
|
||||||
|
|
||||||
|
return {
|
||||||
|
"schema_name": schema_name,
|
||||||
|
"table_count": table_count,
|
||||||
|
"total_size": total_size,
|
||||||
|
"plugin_id": plugin_id
|
||||||
|
}
|
||||||
|
|
||||||
|
finally:
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get database stats for plugin {plugin_id}: {e}")
|
||||||
|
return {
|
||||||
|
"schema_name": f"plugin_{plugin_id}",
|
||||||
|
"table_count": 0,
|
||||||
|
"total_size": "unknown",
|
||||||
|
"plugin_id": plugin_id,
|
||||||
|
"error": str(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class PluginDatabaseSession:
    """Async context manager wrapping a plugin-scoped database session.

    On exit the session is committed when the body completed normally,
    rolled back when it raised, and always closed afterwards.
    """

    def __init__(self, plugin_id: str, db_manager: PluginDatabaseManager):
        self.plugin_id = plugin_id
        self.db_manager = db_manager
        self.session = None  # populated by __aenter__

    async def __aenter__(self) -> AsyncSession:
        """Acquire the plugin's session; fail fast if none is registered."""
        session = await self.db_manager.get_plugin_session(self.plugin_id)
        if not session:
            raise PluginError(f"No database session available for plugin {self.plugin_id}")
        self.session = session
        return session

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Commit or roll back depending on whether the body raised, then close."""
        if not self.session:
            return
        if exc_type is None:
            await self.session.commit()
        else:
            await self.session.rollback()
        await self.session.close()
||||||
|
|
||||||
|
class PluginMigrationManager:
    """Manages Alembic database migrations for plugins.

    Each plugin gets its own migration environment (``alembic.ini`` plus a
    ``migrations/`` directory) whose SQLAlchemy URL pins ``search_path`` to
    the plugin's dedicated ``plugin_<id>`` schema.
    """

    def __init__(self, db_manager: PluginDatabaseManager):
        # Shared manager that owns the per-plugin engines/sessions.
        self.db_manager = db_manager

    async def create_migration_environment(self, plugin_id: str, plugin_dir: Path) -> bool:
        """Scaffold an Alembic environment (ini, env.py, script template).

        Files are written under *plugin_dir*; existing files are overwritten.

        Returns:
            True on success, False on any failure (logged).
        """
        try:
            migrations_dir = plugin_dir / "migrations"
            migrations_dir.mkdir(exist_ok=True)

            # alembic.ini — points Alembic at the plugin schema via search_path.
            alembic_ini_content = f"""
[alembic]
script_location = migrations
sqlalchemy.url = {settings.DATABASE_URL}?options=-csearch_path%3Dplugin_{plugin_id}

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
"""

            alembic_ini_path = plugin_dir / "alembic.ini"
            with open(alembic_ini_path, 'w') as f:
                f.write(alembic_ini_content)

            # env.py — standard Alembic environment; plugins fill in their
            # own target_metadata for autogenerate support.
            env_py_content = f"""
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context

# Import plugin models here
# from your_plugin.models import Base

# this is the Alembic Config object
config = context.config

# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata for autogenerate support
target_metadata = None  # Set to your plugin's Base.metadata

def run_migrations_offline() -> None:
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={{"paramstyle": "named"}},
    )

    with context.begin_transaction():
        context.run_migrations()

def run_migrations_online() -> None:
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
"""

            env_py_path = migrations_dir / "env.py"
            with open(env_py_path, 'w') as f:
                f.write(env_py_content)

            # script.py.mako — template Alembic uses to generate revisions.
            script_mako_content = '''"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
'''

            script_mako_path = migrations_dir / "script.py.mako"
            with open(script_mako_path, 'w') as f:
                f.write(script_mako_content)

            logger.info(f"Created migration environment for plugin {plugin_id}")
            return True

        except Exception as e:
            logger.error(f"Failed to create migration environment for plugin {plugin_id}: {e}")
            return False

    async def run_plugin_migrations(self, plugin_id: str, plugin_dir: Path) -> bool:
        """Run pending migrations for a plugin.

        A plugin without a migration environment is treated as having nothing
        to run (returns True).

        Returns:
            True when migrations completed (or none exist), False on failure.
        """
        try:
            alembic_ini_path = plugin_dir / "alembic.ini"
            migrations_dir = plugin_dir / "migrations"

            # No migration environment -> nothing to do.
            if not alembic_ini_path.exists() or not migrations_dir.exists():
                logger.info(f"No migrations found for plugin {plugin_id}, skipping")
                return True  # No migrations to run

            # Scaffold env.py etc. if only the directory skeleton exists.
            if not (migrations_dir / "env.py").exists():
                await self.create_migration_environment(plugin_id, plugin_dir)

            # The plugin must already have a registered engine.
            engine = await self.db_manager.get_plugin_engine(plugin_id)
            if not engine:
                raise PluginError(f"No database engine for plugin {plugin_id}")

            # Run the upgrade via the Alembic API.
            await self._execute_alembic_upgrade(plugin_id, plugin_dir, engine)

            logger.info(f"Successfully completed migrations for plugin {plugin_id}")
            return True

        except Exception as e:
            logger.error(f"Failed to run migrations for plugin {plugin_id}: {e}")
            return False

    async def _execute_alembic_upgrade(self, plugin_id: str, plugin_dir: Path, engine):
        """Execute ``alembic upgrade head`` for the plugin.

        Alembic is synchronous, so the upgrade runs in a worker thread to
        avoid blocking the event loop.

        Raises:
            PluginError: if the upgrade command fails.
        """
        try:
            # Configure Alembic from the plugin's ini file.
            alembic_cfg = Config(str(plugin_dir / "alembic.ini"))

            # Pin the database URL to the plugin's schema.
            schema_name = f"plugin_{plugin_id}"
            alembic_cfg.set_main_option(
                "sqlalchemy.url",
                f"{settings.DATABASE_URL}?options=-csearch_path%3D{schema_name}"
            )

            alembic_cfg.set_main_option("script_location", str(plugin_dir / "migrations"))

            import concurrent.futures

            def run_upgrade():
                # Runs in a worker thread; returns success as a bool.
                try:
                    command.upgrade(alembic_cfg, "head")
                    return True
                except Exception as e:
                    logger.error(f"Alembic upgrade failed for plugin {plugin_id}: {e}")
                    return False

            # MODERNIZATION: get_running_loop() is the correct call inside a
            # coroutine; get_event_loop() is deprecated here.
            loop = asyncio.get_running_loop()
            with concurrent.futures.ThreadPoolExecutor() as executor:
                success = await loop.run_in_executor(executor, run_upgrade)

            if not success:
                raise PluginError(f"Alembic upgrade command failed for plugin {plugin_id}")

            logger.info(f"Alembic upgrade completed for plugin {plugin_id}")

        except Exception as e:
            logger.error(f"Failed to execute Alembic upgrade for plugin {plugin_id}: {e}")
            raise

    async def get_migration_status(self, plugin_id: str, plugin_dir: Path) -> Dict[str, Any]:
        """Report current and available migration revisions for a plugin.

        Returns:
            Dict with ``plugin_id`` and ``has_migrations``; when an
            environment exists also ``current_revision``,
            ``available_revisions`` and ``schema_name``, or ``error`` on
            failure.
        """
        try:
            alembic_ini_path = plugin_dir / "alembic.ini"
            if not alembic_ini_path.exists():
                return {
                    "plugin_id": plugin_id,
                    "has_migrations": False,
                    "current_revision": None,
                    "pending_migrations": []
                }

            # Point Alembic at the plugin schema.
            alembic_cfg = Config(str(alembic_ini_path))
            schema_name = f"plugin_{plugin_id}"
            alembic_cfg.set_main_option(
                "sqlalchemy.url",
                f"{settings.DATABASE_URL}?options=-csearch_path%3D{schema_name}"
            )

            # An async engine must already be registered for this plugin.
            engine = await self.db_manager.get_plugin_engine(plugin_id)
            if not engine:
                return {
                    "plugin_id": plugin_id,
                    "has_migrations": True,
                    "error": "No database engine available"
                }

            # Alembic's MigrationContext needs a synchronous connection.
            sync_engine = create_engine(f"{settings.DATABASE_URL}?options=-csearch_path%3D{schema_name}")
            try:
                with sync_engine.connect() as connection:
                    context = MigrationContext.configure(connection)
                    current_rev = context.get_current_revision()
            finally:
                # BUG FIX: dispose the temporary sync engine — the original
                # leaked its connection pool on every status call.
                sync_engine.dispose()

            # Enumerate all revisions known to the script directory.
            from alembic.script import ScriptDirectory
            script_dir = ScriptDirectory.from_config(alembic_cfg)
            revisions = [rev.revision for rev in script_dir.walk_revisions()]

            return {
                "plugin_id": plugin_id,
                "has_migrations": True,
                "current_revision": current_rev,
                "available_revisions": revisions,
                "schema_name": schema_name
            }

        except Exception as e:
            logger.error(f"Failed to get migration status for plugin {plugin_id}: {e}")
            return {
                "plugin_id": plugin_id,
                "has_migrations": False,
                "error": str(e)
            }
|
||||||
|
# Global plugin database manager
# Module-level singletons shared across the plugin system; the migration
# manager reuses the database manager's per-plugin engines.
plugin_db_manager = PluginDatabaseManager()
plugin_migration_manager = PluginMigrationManager(plugin_db_manager)
|
||||||
# ---------------------------------------------------------------------------
# New file: backend/app/services/plugin_gateway.py (555 lines)
# ---------------------------------------------------------------------------
|
|||||||
|
"""
|
||||||
|
Plugin API Gateway
|
||||||
|
Handles authentication, routing, and security for plugin APIs
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import time
|
||||||
|
import jwt
|
||||||
|
from typing import Dict, Any, List, Optional, Tuple
|
||||||
|
from fastapi import FastAPI, Request, Response, HTTPException, Depends
|
||||||
|
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||||
|
from fastapi.middleware.base import BaseHTTPMiddleware
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
import aiohttp
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.core.security import verify_jwt_token, get_current_user
|
||||||
|
from app.models.plugin import Plugin, PluginConfiguration, PluginAuditLog
|
||||||
|
from app.models.api_key import APIKey
|
||||||
|
from app.models.user import User
|
||||||
|
from app.db.database import get_db
|
||||||
|
from app.services.plugin_sandbox import plugin_loader
|
||||||
|
from app.utils.exceptions import SecurityError, PluginError
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
|
||||||
|
logger = get_logger("plugin.gateway")
|
||||||
|
security = HTTPBearer()
|
||||||
|
|
||||||
|
|
||||||
|
class PluginAuthenticationService:
    """Handles plugin authentication and authorization.

    All methods are static lookups against the database; they return
    ``(None, None)`` / ``False`` on any rejection rather than raising.
    """

    @staticmethod
    async def verify_plugin_token(token: str, db: Session) -> Tuple[Optional[str], Optional[Dict[str, Any]]]:
        """Verify a plugin JWT and resolve its user and plugin.

        Returns:
            ``(user_id, context)`` where *context* holds the user, the plugin
            and the token's ``permissions`` claim, or ``(None, None)`` when
            the token is invalid, missing claims, or the plugin/user is not
            found or not enabled.
        """
        try:
            # Decode and verify the JWT signature (HS256 with the app secret).
            payload = jwt.decode(
                token,
                settings.JWT_SECRET,
                algorithms=["HS256"]
            )

            # Both the subject (user) and plugin claims are required.
            user_id = payload.get("sub")
            plugin_id = payload.get("plugin_id")

            if not user_id or not plugin_id:
                return None, None

            # The plugin must exist and be enabled.
            plugin = db.query(Plugin).filter(
                Plugin.id == plugin_id,
                Plugin.status == "enabled"
            ).first()

            if not plugin:
                return None, None

            # The token's subject must map to an existing user.
            user = db.query(User).filter(User.id == user_id).first()
            if not user:
                return None, None

            return user_id, {
                "user": user,
                "plugin": plugin,
                "permissions": payload.get("permissions", [])
            }

        except jwt.InvalidTokenError:
            # Bad signature, expiry, malformed token, etc.
            return None, None

    @staticmethod
    async def verify_api_key_access(api_key: str, plugin_id: str, db: Session) -> Tuple[Optional[str], Optional[Dict[str, Any]]]:
        """Verify that an API key may access the given plugin.

        The raw key is hashed with SHA-256 and matched against the stored
        ``key_hash`` of an active key.

        Returns:
            ``(user_id, context)`` with the key's owner, the plugin and the
            key object, or ``(None, None)`` on any rejection or error.
        """
        try:
            # Look up an active key by the SHA-256 hash of the presented key.
            api_key_obj = db.query(APIKey).filter(
                APIKey.key_hash == hashlib.sha256(api_key.encode()).hexdigest(),
                APIKey.is_active == True
            ).first()

            if not api_key_obj:
                return None, None

            # The target plugin must exist and be enabled.
            plugin = db.query(Plugin).filter(
                Plugin.id == plugin_id,
                Plugin.status == "enabled"
            ).first()

            if not plugin:
                return None, None

            # NOTE(review): any active API key currently grants access to any
            # enabled plugin — per-plugin scope checking is still pending.
            # TODO: Check plugin-specific permissions in API key scopes

            return api_key_obj.user_id, {
                "user": api_key_obj.user,
                "plugin": plugin,
                "api_key": api_key_obj,
                "permissions": ["api_access"]
            }

        except Exception as e:
            logger.error(f"API key verification failed: {e}")
            return None, None

    @staticmethod
    async def check_plugin_permissions(user_id: str, plugin_id: str, endpoint: str,
                                       method: str, db: Session) -> bool:
        """Check whether a user may access a plugin endpoint.

        Requires an active per-user plugin configuration and an endpoint
        entry in the plugin manifest matching *endpoint* and *method*.
        Unknown endpoints are denied.

        Returns:
            True when access is allowed, False otherwise (including errors).
        """
        try:
            # The user must have an active configuration for this plugin.
            config = db.query(PluginConfiguration).filter(
                PluginConfiguration.user_id == user_id,
                PluginConfiguration.plugin_id == plugin_id,
                PluginConfiguration.is_active == True
            ).first()

            if not config:
                return False

            # The manifest declares which endpoints the plugin exposes.
            plugin = db.query(Plugin).filter(Plugin.id == plugin_id).first()
            if not plugin or not plugin.manifest_data:
                return False

            manifest = plugin.manifest_data
            api_endpoints = manifest.get("spec", {}).get("api_endpoints", [])

            for ep in api_endpoints:
                if ep.get("path") == endpoint and method in ep.get("methods", []):
                    # NOTE(review): this returns the endpoint's auth_required
                    # flag, i.e. "does this endpoint require auth", not
                    # "is the user permitted" — confirm the intended
                    # semantics with the gateway's caller.
                    return ep.get("auth_required", True)  # Default to requiring auth

            # Endpoint not declared in the manifest -> deny.
            return False

        except Exception as e:
            logger.error(f"Permission check failed: {e}")
            return False
||||||
|
|
||||||
|
class PluginAuditService:
    """Handles audit logging for plugin access.

    Writes one PluginAuditLog row per gateway request; failures are logged
    and rolled back but never propagated to the caller (best-effort audit).
    """

    @staticmethod
    async def log_plugin_access(
        plugin_id: str,
        user_id: Optional[str],
        api_key_id: Optional[str],
        endpoint: str,
        method: str,
        ip_address: str,
        user_agent: str,
        status_code: int,
        response_time_ms: float,
        db: Session
    ):
        """Log plugin API access for audit trail.

        Request metadata (status, latency, client IP, user agent) is packed
        into the JSON ``details`` column rather than dedicated columns.
        """
        try:
            audit_log = PluginAuditLog(
                plugin_id=plugin_id,
                user_id=user_id,
                api_key_id=api_key_id,
                action="api_access",
                endpoint=endpoint,
                method=method,
                details={
                    "status_code": status_code,
                    "response_time_ms": response_time_ms,
                    "ip_address": ip_address,
                    "user_agent": user_agent
                }
            )

            # Commit immediately so the audit record survives even if the
            # caller's session is later rolled back.
            db.add(audit_log)
            db.commit()

        except Exception as e:
            # Auditing must never break request handling; swallow and log.
            logger.error(f"Failed to log plugin access: {e}")
            db.rollback()
|
||||||
|
|
||||||
|
|
||||||
|
class PluginRateLimiter:
    """In-memory sliding-window rate limiter for plugin API access.

    Keeps request timestamps per user and per plugin over a fixed
    60-second window. State is process-local and not thread-safe.
    """

    def __init__(self):
        # Timestamps of recent requests, keyed by "user:<id>" / "plugin:<id>".
        self.user_requests: Dict[str, List[float]] = {}
        self.plugin_requests: Dict[str, List[float]] = {}

    def check_rate_limit(self, user_id: str, plugin_id: str, limits: Dict[str, int]) -> bool:
        """Return True and record the request when both the user and the
        plugin are under their per-minute caps; otherwise return False
        without recording anything.

        ``limits`` keys: "user_requests_per_minute" (default 100) and
        "plugin_requests_per_minute" (default 200).
        """
        now = time.time()
        window = 60  # 1 minute window

        # Drop timestamps that fell out of the window before counting.
        self._clean_old_requests(now, window)

        # Per-user cap is enforced first; the plugin bucket is untouched
        # when the user is already over the limit.
        user_key = f"user:{user_id}"
        user_cap = limits.get("user_requests_per_minute", 100)
        user_bucket = self.user_requests.setdefault(user_key, [])
        if len(user_bucket) >= user_cap:
            logger.warning(f"User {user_id} rate limit exceeded: {len(self.user_requests[user_key])}/{user_cap}")
            return False

        # Then the aggregate per-plugin cap across all users.
        plugin_key = f"plugin:{plugin_id}"
        plugin_cap = limits.get("plugin_requests_per_minute", 200)
        plugin_bucket = self.plugin_requests.setdefault(plugin_key, [])
        if len(plugin_bucket) >= plugin_cap:
            logger.warning(f"Plugin {plugin_id} rate limit exceeded: {len(self.plugin_requests[plugin_key])}/{plugin_cap}")
            return False

        # Both checks passed: charge this request to both buckets.
        user_bucket.append(now)
        plugin_bucket.append(now)

        return True

    def _clean_old_requests(self, current_time: float, window_seconds: int):
        """Prune timestamps older than the window from every bucket."""
        cutoff = current_time - window_seconds
        for bucket_map in (self.user_requests, self.plugin_requests):
            for key, stamps in bucket_map.items():
                bucket_map[key] = [t for t in stamps if t > cutoff]
|
||||||
|
|
||||||
|
|
||||||
|
class PluginGatewayMiddleware(BaseHTTPMiddleware):
    """Middleware for plugin API gateway.

    Intercepts requests under ``/api/v1/plugins/`` and performs, in order:
    authentication (JWT then API key), manifest-based permission checks,
    rate limiting, forwarding to the loaded plugin, and audit logging.
    All other paths pass straight through.
    """

    def __init__(self, app: FastAPI):
        super().__init__(app)
        # Per-middleware-instance services; rate limiter state is local to
        # this process.
        self.auth_service = PluginAuthenticationService()
        self.audit_service = PluginAuditService()
        self.rate_limiter = PluginRateLimiter()

    async def dispatch(self, request: Request, call_next):
        """Process plugin API requests.

        Returns a JSON error response (401/403/404/429/500) when any stage
        fails; otherwise the plugin's own response.
        """
        start_time = time.time()

        # Check if this is a plugin API request
        if not request.url.path.startswith("/api/v1/plugins/"):
            return await call_next(request)

        # Extract plugin ID from path: /api/v1/plugins/<plugin_id>/<rest...>
        path_parts = request.url.path.split("/")
        if len(path_parts) < 5:
            return JSONResponse(
                status_code=404,
                content={"error": "Invalid plugin API path"}
            )

        plugin_id = path_parts[4]
        plugin_endpoint = "/" + "/".join(path_parts[5:]) if len(path_parts) > 5 else "/"

        # Get database session.
        # NOTE(review): ``next(get_db())`` consumes a generator dependency
        # outside FastAPI's DI; the session is closed in ``finally`` below,
        # but the generator's own cleanup never runs — confirm acceptable.
        db = next(get_db())

        try:
            # Authenticate request (JWT first, then API key; see
            # _authenticate_request).
            auth_result = await self._authenticate_request(request, plugin_id, db)
            if not auth_result:
                return JSONResponse(
                    status_code=401,
                    content={"error": "Authentication failed"}
                )

            user_id, auth_context = auth_result

            # Check permissions against the plugin manifest.
            has_permission = await self.auth_service.check_plugin_permissions(
                user_id, plugin_id, plugin_endpoint, request.method, db
            )

            if not has_permission:
                return JSONResponse(
                    status_code=403,
                    content={"error": "Insufficient permissions"}
                )

            # Check rate limits (hard-coded defaults; not yet configurable
            # per plugin).
            rate_limits = {
                "user_requests_per_minute": 100,
                "plugin_requests_per_minute": 200
            }

            if not self.rate_limiter.check_rate_limit(user_id, plugin_id, rate_limits):
                return JSONResponse(
                    status_code=429,
                    content={"error": "Rate limit exceeded"}
                )

            # Add authentication context to request for downstream handlers.
            request.state.user_id = user_id
            request.state.plugin_id = plugin_id
            request.state.auth_context = auth_context
            request.state.plugin_endpoint = plugin_endpoint

            # Forward to plugin
            response = await self._forward_to_plugin(request, plugin_id, plugin_endpoint, call_next)

            # Log access (best-effort; see PluginAuditService).
            response_time = (time.time() - start_time) * 1000
            await self.audit_service.log_plugin_access(
                plugin_id=plugin_id,
                user_id=user_id,
                api_key_id=auth_context.get("api_key", {}).get("id") if "api_key" in auth_context else None,
                endpoint=plugin_endpoint,
                method=request.method,
                ip_address=request.client.host,
                user_agent=request.headers.get("user-agent", ""),
                status_code=response.status_code,
                response_time_ms=response_time,
                db=db
            )

            return response

        except Exception as e:
            logger.error(f"Plugin gateway error: {e}")
            return JSONResponse(
                status_code=500,
                content={"error": "Internal gateway error"}
            )
        finally:
            db.close()

    async def _authenticate_request(self, request: Request, plugin_id: str, db: Session) -> Optional[Tuple[str, Dict[str, Any]]]:
        """Authenticate plugin API request.

        Returns ``(user_id, auth_context)`` on success, ``None`` otherwise.
        Only ``Bearer`` tokens are supported; the same token value is tried
        first as a JWT, then as an API key.
        """

        # Check for Authorization header
        auth_header = request.headers.get("authorization")
        if not auth_header:
            return None

        if auth_header.startswith("Bearer "):
            token = auth_header[7:]

            # Try JWT token first
            result = await self.auth_service.verify_plugin_token(token, db)
            if result[0]:
                return result

            # Try API key
            result = await self.auth_service.verify_api_key_access(token, plugin_id, db)
            if result[0]:
                return result

        return None

    async def _forward_to_plugin(self, request: Request, plugin_id: str,
                                 plugin_endpoint: str, call_next) -> Response:
        """Forward request to plugin and handle response.

        Rejects with 503 when the plugin is not loaded or its sandbox
        reports a resource-limit violation.
        """

        # Check if plugin is loaded
        plugin_instance = plugin_loader.loaded_plugins.get(plugin_id)
        if not plugin_instance:
            return JSONResponse(
                status_code=503,
                content={"error": f"Plugin {plugin_id} not loaded"}
            )

        # Check sandbox resource usage before spending work on the request.
        sandbox = plugin_loader.get_plugin_sandbox(plugin_id)
        if sandbox:
            try:
                sandbox.check_resource_usage()
                sandbox.track_api_call()
            except Exception as e:
                return JSONResponse(
                    status_code=503,
                    content={"error": f"Plugin resource limit exceeded: {e}"}
                )

        # Add plugin context headers.
        # HACK: mutates Starlette's private ``Headers._list`` — there is no
        # public API for adding request headers in middleware. Fragile
        # across Starlette upgrades; consider passing context via
        # ``request.state`` only.
        request.headers.__dict__["_list"].extend([
            (b"x-user-id", request.state.user_id.encode()),
            (b"x-plugin-id", plugin_id.encode()),
            (b"x-plugin-endpoint", plugin_endpoint.encode()),
            (b"x-real-ip", request.client.host.encode()),
        ])

        # Forward to plugin
        return await call_next(request)
|
||||||
|
|
||||||
|
|
||||||
|
class PluginAPIGateway:
    """Main plugin API gateway service.

    Installs PluginGatewayMiddleware on the FastAPI app and registers the
    plugin management endpoints (list/get/load/unload/health).
    """

    def __init__(self):
        # Populated by init_app().
        self.middleware = None
        self.app = None

    def init_app(self, app: FastAPI):
        """Initialize gateway with FastAPI app.

        NOTE(review): the manually constructed ``self.middleware`` instance
        is never used by the app — ``add_middleware`` builds its own
        instance from the class. The extra instance looks redundant.
        """
        self.app = app
        self.middleware = PluginGatewayMiddleware(app)
        app.add_middleware(PluginGatewayMiddleware)

        # Add plugin management endpoints
        self._add_management_endpoints(app)

    def _add_management_endpoints(self, app: FastAPI):
        """Add plugin management endpoints as closures on the given app."""

        @app.get("/api/v1/plugins")
        async def list_plugins(db: Session = Depends(get_db)):
            """List available plugins (only those with status "enabled")."""
            plugins = db.query(Plugin).filter(Plugin.status == "enabled").all()

            plugin_list = []
            for plugin in plugins:
                # Get runtime status
                plugin_instance = plugin_loader.loaded_plugins.get(plugin.id)
                loaded = plugin_instance is not None

                # Get resource stats if loaded
                resource_stats = {}
                if loaded:
                    resource_stats = plugin_loader.get_resource_stats(plugin.id)

                plugin_list.append({
                    "id": plugin.id,
                    "name": plugin.name,
                    "version": plugin.version,
                    "description": plugin.description,
                    "status": plugin.status,
                    "loaded": loaded,
                    "resource_usage": resource_stats,
                    "created_at": plugin.created_at.isoformat(),
                    "updated_at": plugin.updated_at.isoformat()
                })

            return {"plugins": plugin_list}

        @app.get("/api/v1/plugins/{plugin_id}")
        async def get_plugin(plugin_id: str, db: Session = Depends(get_db)):
            """Get plugin details, including manifest, health and resource usage."""
            plugin = db.query(Plugin).filter(Plugin.id == plugin_id).first()
            if not plugin:
                raise HTTPException(status_code=404, detail="Plugin not found")

            # Get runtime status
            plugin_instance = plugin_loader.loaded_plugins.get(plugin_id)
            loaded = plugin_instance is not None

            # Get resource stats if loaded
            resource_stats = {}
            health_status = {}
            if loaded:
                resource_stats = plugin_loader.get_resource_stats(plugin_id)
                health_status = await plugin_instance.health_check()

            return {
                "plugin": {
                    "id": plugin.id,
                    "name": plugin.name,
                    "version": plugin.version,
                    "description": plugin.description,
                    "status": plugin.status,
                    "manifest": plugin.manifest_data,
                    "loaded": loaded,
                    "resource_usage": resource_stats,
                    "health": health_status,
                    "created_at": plugin.created_at.isoformat(),
                    "updated_at": plugin.updated_at.isoformat()
                }
            }

        @app.post("/api/v1/plugins/{plugin_id}/load")
        async def load_plugin(plugin_id: str, db: Session = Depends(get_db)):
            """Load a plugin into the sandbox (must exist, be enabled, not loaded)."""
            plugin = db.query(Plugin).filter(Plugin.id == plugin_id).first()
            if not plugin:
                raise HTTPException(status_code=404, detail="Plugin not found")

            if plugin.status != "enabled":
                raise HTTPException(status_code=400, detail="Plugin not enabled")

            # Check if already loaded
            if plugin_id in plugin_loader.loaded_plugins:
                raise HTTPException(status_code=400, detail="Plugin already loaded")

            try:
                # Load plugin.
                # NOTE(review): plugin directory is hard-coded, not read from
                # the Plugin record or settings — verify this matches the
                # installer's layout.
                plugin_dir = f"/plugins/{plugin_id}"
                plugin_token = "temp_token"  # TODO: Generate proper plugin token

                await plugin_loader.load_plugin_with_sandbox(plugin_dir, plugin_token)

                return {"status": "loaded", "plugin_id": plugin_id}

            except Exception as e:
                raise HTTPException(status_code=500, detail=f"Failed to load plugin: {e}")

        @app.post("/api/v1/plugins/{plugin_id}/unload")
        async def unload_plugin(plugin_id: str):
            """Unload a plugin from the sandbox."""
            if plugin_id not in plugin_loader.loaded_plugins:
                raise HTTPException(status_code=404, detail="Plugin not loaded")

            try:
                success = await plugin_loader.unload_plugin(plugin_id)
                if success:
                    return {"status": "unloaded", "plugin_id": plugin_id}
                else:
                    # NOTE(review): this HTTPException is caught by the
                    # ``except Exception`` below and re-wrapped, so the
                    # client sees "Failed to unload plugin: 500: Failed to
                    # unload plugin" — likely unintended.
                    raise HTTPException(status_code=500, detail="Failed to unload plugin")

            except Exception as e:
                raise HTTPException(status_code=500, detail=f"Failed to unload plugin: {e}")

        @app.get("/api/v1/plugins/{plugin_id}/health")
        async def get_plugin_health(plugin_id: str):
            """Get plugin health status plus current resource usage."""
            plugin_instance = plugin_loader.loaded_plugins.get(plugin_id)
            if not plugin_instance:
                raise HTTPException(status_code=404, detail="Plugin not loaded")

            try:
                health = await plugin_instance.health_check()
                resource_stats = plugin_loader.get_resource_stats(plugin_id)

                return {
                    "health": health,
                    "resource_usage": resource_stats
                }

            except Exception as e:
                raise HTTPException(status_code=500, detail=f"Health check failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# Global gateway instance; callers are expected to invoke
# plugin_gateway.init_app(app) once during application startup.
plugin_gateway = PluginAPIGateway()
|
||||||
790
backend/app/services/plugin_registry.py
Normal file
790
backend/app/services/plugin_registry.py
Normal file
@@ -0,0 +1,790 @@
|
|||||||
|
"""
|
||||||
|
Plugin Registry and Discovery System
|
||||||
|
Handles plugin installation, updates, discovery, and marketplace functionality
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
import zipfile
|
||||||
|
import aiohttp
|
||||||
|
from typing import Dict, Any, List, Optional, Tuple
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy import and_, or_
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import base64
|
||||||
|
from cryptography.hazmat.primitives import hashes, serialization
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import rsa, padding
|
||||||
|
from cryptography.exceptions import InvalidSignature
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.models.plugin import Plugin, PluginConfiguration, PluginAuditLog
|
||||||
|
from app.models.user import User
|
||||||
|
from app.db.database import get_db
|
||||||
|
from app.schemas.plugin_manifest import PluginManifestValidator, validate_manifest_file
|
||||||
|
from app.services.plugin_sandbox import plugin_loader
|
||||||
|
from app.services.plugin_database import plugin_db_manager, plugin_migration_manager
|
||||||
|
from app.utils.exceptions import PluginError, SecurityError, ValidationError
|
||||||
|
|
||||||
|
|
||||||
|
logger = get_logger("plugin.registry")
|
||||||
|
|
||||||
|
|
||||||
|
class PluginRepositoryClient:
    """Client for interacting with plugin repositories.

    Wraps the repository's HTTP API (search, info, download) and RSA-PSS
    signature verification of downloaded packages. Network errors degrade
    gracefully (empty results / None / False) rather than raising.
    """

    def __init__(self, repository_url: str = None):
        # Falls back to the platform-configured repository URL.
        self.repository_url = repository_url or settings.PLUGIN_REPOSITORY_URL
        self.timeout = 30  # seconds, per request

    async def search_plugins(self, query: str, tags: List[str] = None,
                             limit: int = 20) -> List[Dict[str, Any]]:
        """Search for plugins in repository.

        Returns the repository's plugin list, or [] on any failure.
        """
        try:
            # Try connecting to the repository
            params = {
                "q": query,
                "limit": limit
            }

            if tags:
                params["tags"] = ",".join(tags)

            async with aiohttp.ClientSession() as session:
                async with session.get(
                    f"{self.repository_url}/api/plugins/search",
                    params=params,
                    timeout=self.timeout
                ) as response:
                    if response.status == 200:
                        data = await response.json()
                        return data.get("plugins", [])
                    else:
                        logger.error(f"Plugin search failed: {response.status}")
                        # Repository unavailable, return empty list
                        return []

        except Exception as e:
            logger.error(f"Plugin search error: {e}")
            # Repository unavailable, return empty list
            return []

    async def get_plugin_info(self, plugin_id: str) -> Optional[Dict[str, Any]]:
        """Get detailed information about a plugin.

        Returns the repository's JSON document, or None when the plugin is
        unknown (404) or on any error.
        """
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                    f"{self.repository_url}/api/plugins/{plugin_id}",
                    timeout=self.timeout
                ) as response:
                    if response.status == 200:
                        return await response.json()
                    elif response.status == 404:
                        return None
                    else:
                        logger.error(f"Failed to get plugin info for {plugin_id}: {response.status}")
                        return None

        except Exception as e:
            logger.error(f"Error getting plugin info for {plugin_id}: {e}")
            return None

    async def download_plugin(self, plugin_id: str, version: str,
                              download_path: Path) -> bool:
        """Download plugin package from repository.

        Streams the package in 8 KiB chunks to ``download_path``.
        NOTE(review): the file is written with blocking ``open``/``write``
        inside an async method — acceptable for small packages but blocks
        the event loop for large ones.
        """
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                    f"{self.repository_url}/api/plugins/{plugin_id}/download/{version}",
                    timeout=60  # Longer timeout for downloads
                ) as response:
                    if response.status == 200:
                        with open(download_path, 'wb') as f:
                            async for chunk in response.content.iter_chunked(8192):
                                f.write(chunk)
                        return True
                    else:
                        logger.error(f"Plugin download failed: {response.status}")
                        return False

        except Exception as e:
            logger.error(f"Plugin download error: {e}")
            return False

    async def verify_plugin_signature(self, plugin_path: Path,
                                      signature: str) -> bool:
        """Verify plugin package signature using RSA digital signatures.

        The base64 ``signature`` must be an RSA-PSS/SHA-256 signature over
        the SHA-256 digest of the file (the digest is hashed again by PSS;
        this mirrors ``sign_plugin_package`` exactly, so the pair is
        self-consistent). Returns False on any failure (fail closed).
        """
        try:
            # Calculate file hash
            with open(plugin_path, 'rb') as f:
                file_content = f.read()
                file_hash = hashlib.sha256(file_content).digest()

            # Load platform public key for verification
            public_key = self._get_platform_public_key()
            if not public_key:
                logger.error("No platform public key available for signature verification")
                return False

            # Decode base64 signature
            try:
                signature_bytes = base64.b64decode(signature)
            except Exception as e:
                logger.error(f"Invalid signature format: {e}")
                return False

            # Verify RSA signature
            try:
                public_key.verify(
                    signature_bytes,
                    file_hash,
                    padding.PSS(
                        mgf=padding.MGF1(hashes.SHA256()),
                        salt_length=padding.PSS.MAX_LENGTH
                    ),
                    hashes.SHA256()
                )

                logger.info(f"Plugin signature verified successfully for {plugin_path.name}")
                return True

            except InvalidSignature:
                logger.error(f"Invalid signature for plugin {plugin_path.name}")
                return False

        except Exception as e:
            logger.error(f"Signature verification error: {e}")
            return False

    def _get_platform_public_key(self):
        """Get platform public key for signature verification.

        Resolution order: PLUGIN_SIGNING_PUBLIC_KEY env var (PEM text) →
        /data/plugin_keys/public_key.pem → freshly generated development
        key pair. Returns None on error.
        """
        try:
            # Try to load from environment variable first
            public_key_pem = os.environ.get('PLUGIN_SIGNING_PUBLIC_KEY')

            if public_key_pem:
                public_key = serialization.load_pem_public_key(public_key_pem.encode())
                return public_key

            # Fall back to file-based public key
            public_key_path = Path("/data/plugin_keys/public_key.pem")
            if public_key_path.exists():
                with open(public_key_path, 'rb') as f:
                    public_key = serialization.load_pem_public_key(f.read())
                    return public_key

            # Generate development key pair if none exists
            return self._generate_development_key_pair()

        except Exception as e:
            logger.error(f"Failed to load platform public key: {e}")
            return None

    def _generate_development_key_pair(self):
        """Generate development key pair for testing (NOT for production).

        Writes an UNENCRYPTED 2048-bit RSA private key and its public key
        under /data/plugin_keys and returns the public key. Returns None on
        error.
        """
        try:
            logger.warning("Generating development key pair for plugin signing - NOT for production use!")

            # Generate RSA key pair
            private_key = rsa.generate_private_key(
                public_exponent=65537,
                key_size=2048
            )
            public_key = private_key.public_key()

            # Save keys to secure location
            keys_dir = Path("/data/plugin_keys")
            keys_dir.mkdir(parents=True, exist_ok=True)

            # Save private key (for development signing).
            # SECURITY: stored without encryption — development only.
            private_pem = private_key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.PKCS8,
                encryption_algorithm=serialization.NoEncryption()
            )

            private_key_path = keys_dir / "private_key.pem"
            with open(private_key_path, 'wb') as f:
                f.write(private_pem)

            # Save public key
            public_pem = public_key.public_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PublicFormat.SubjectPublicKeyInfo
            )

            public_key_path = keys_dir / "public_key.pem"
            with open(public_key_path, 'wb') as f:
                f.write(public_pem)

            # Log the public key for production configuration
            public_key_b64 = base64.b64encode(public_pem).decode()
            logger.warning(
                f"Generated development keys. For production, set PLUGIN_SIGNING_PUBLIC_KEY environment variable to: "
                f"{public_key_b64}"
            )

            return public_key

        except Exception as e:
            logger.error(f"Failed to generate development key pair: {e}")
            return None

    async def sign_plugin_package(self, plugin_path: Path) -> Optional[str]:
        """Sign plugin package (for development/testing).

        Signs the SHA-256 digest of the file with the development private
        key using RSA-PSS/SHA-256 (the exact inverse of
        ``verify_plugin_signature``). Returns the base64 signature, or
        None when no key is available or signing fails.
        """
        try:
            # This method is for development use only
            private_key_path = Path("/data/plugin_keys/private_key.pem")
            if not private_key_path.exists():
                logger.error("No private key available for signing")
                return None

            # Load private key
            with open(private_key_path, 'rb') as f:
                private_key = serialization.load_pem_private_key(
                    f.read(),
                    password=None
                )

            # Calculate file hash
            with open(plugin_path, 'rb') as f:
                file_hash = hashlib.sha256(f.read()).digest()

            # Sign the hash
            signature = private_key.sign(
                file_hash,
                padding.PSS(
                    mgf=padding.MGF1(hashes.SHA256()),
                    salt_length=padding.PSS.MAX_LENGTH
                ),
                hashes.SHA256()
            )

            # Return base64-encoded signature
            return base64.b64encode(signature).decode()

        except Exception as e:
            logger.error(f"Failed to sign plugin package: {e}")
            return None
|
||||||
|
|
||||||
|
|
||||||
|
class PluginInstaller:
|
||||||
|
"""Handles plugin installation and updates"""
|
||||||
|
|
||||||
|
def __init__(self):
    """Prepare the installation directories.

    ``plugins_dir`` is where installed plugins live (configurable via
    settings, default /plugins); ``temp_dir`` holds transient extraction
    and download artifacts. Both are created if missing.
    """
    self.plugins_dir = Path(settings.PLUGINS_DIR or "/plugins")
    self.plugins_dir.mkdir(exist_ok=True)
    self.temp_dir = Path(tempfile.gettempdir()) / "enclava_plugins"
    self.temp_dir.mkdir(exist_ok=True)
|
||||||
|
|
||||||
|
async def install_plugin_from_file(self, plugin_file: Path, user_id: str,
                                   db: AsyncSession) -> Dict[str, Any]:
    """Install plugin from uploaded file.

    Extracts the ZIP to a temp directory, validates its manifest, then
    either installs it fresh or updates the existing record with the same
    plugin id. Any failure is re-raised as PluginError. The temp directory
    is always removed.
    """
    try:
        # Extract plugin to temporary directory (timestamp-suffixed to
        # avoid collisions between concurrent installs).
        temp_extract_dir = self.temp_dir / f"extract_{int(asyncio.get_event_loop().time())}"
        temp_extract_dir.mkdir(exist_ok=True)

        try:
            # Extract ZIP file.
            # SECURITY: extractall() on an untrusted archive is vulnerable
            # to path traversal ("zip slip") — member names should be
            # validated before extraction.
            with zipfile.ZipFile(plugin_file, 'r') as zip_ref:
                zip_ref.extractall(temp_extract_dir)

            # Find and validate manifest
            manifest_path = self._find_manifest(temp_extract_dir)
            if not manifest_path:
                raise ValidationError("No valid manifest.yaml found in plugin package")

            validation_result = validate_manifest_file(manifest_path)
            if not validation_result["valid"]:
                raise ValidationError(f"Invalid plugin manifest: {validation_result['errors']}")

            manifest = validation_result["manifest"]
            # The manifest's metadata.name doubles as the plugin id.
            plugin_id = manifest.metadata.name

            # Check if plugin already exists
            from sqlalchemy import select
            stmt = select(Plugin).where(Plugin.id == plugin_id)
            result = await db.execute(stmt)
            existing_plugin = result.scalar_one_or_none()
            if existing_plugin:
                return await self._update_existing_plugin(
                    existing_plugin, temp_extract_dir, manifest, user_id, db
                )
            else:
                return await self._install_new_plugin(
                    temp_extract_dir, manifest, user_id, db
                )

        finally:
            # Cleanup temporary directory
            shutil.rmtree(temp_extract_dir, ignore_errors=True)

    except Exception as e:
        # Wrap everything (including ValidationError) in PluginError so
        # callers have a single failure type.
        logger.error(f"Plugin installation failed: {e}")
        raise PluginError(f"Installation failed: {e}")
|
||||||
|
|
||||||
|
async def install_plugin_from_repository(self, plugin_id: str, version: str,
                                         user_id: str, db: AsyncSession) -> Dict[str, Any]:
    """Install plugin from repository.

    Looks the plugin up in the configured repository, downloads the
    package, verifies its signature when one is published (unsigned
    packages are accepted), and delegates to install_plugin_from_file.
    The downloaded archive is always deleted afterwards.
    """
    try:
        # Download plugin
        repo_client = PluginRepositoryClient()

        # Get plugin info
        plugin_info = await repo_client.get_plugin_info(plugin_id)
        if not plugin_info:
            raise PluginError(f"Plugin {plugin_id} not found in repository")

        # Download plugin package
        download_path = self.temp_dir / f"{plugin_id}_{version}.zip"
        success = await repo_client.download_plugin(plugin_id, version, download_path)
        if not success:
            raise PluginError(f"Failed to download plugin {plugin_id}")

        try:
            # Verify signature if available.
            # NOTE(review): packages without a "signature" field skip
            # verification entirely — confirm unsigned installs are meant
            # to be allowed.
            signature = plugin_info.get("signature")
            if signature:
                verified = await repo_client.verify_plugin_signature(download_path, signature)
                if not verified:
                    raise SecurityError("Plugin signature verification failed")

            # Install from downloaded file
            return await self.install_plugin_from_file(download_path, user_id, db)

        finally:
            # Cleanup downloaded file
            download_path.unlink(missing_ok=True)

    except Exception as e:
        logger.error(f"Repository installation failed: {e}")
        raise PluginError(f"Repository installation failed: {e}")
|
||||||
|
|
||||||
|
async def uninstall_plugin(self, plugin_id: str, user_id: str,
                           db: AsyncSession, keep_data: bool = True) -> Dict[str, Any]:
    """Uninstall plugin.

    Only the installing user or an admin may uninstall. Unloads the
    plugin if running, backs up its data (keep_data=True) or drops its
    database schema (keep_data=False), removes its files, marks the row
    "uninstalled" (the record itself is kept), and writes an audit entry.
    Raises PluginError on any failure after rolling back the session.
    """
    try:
        # Get plugin
        from sqlalchemy import select
        stmt = select(Plugin).where(Plugin.id == plugin_id)
        result = await db.execute(stmt)
        plugin = result.scalar_one_or_none()
        if not plugin:
            raise PluginError(f"Plugin {plugin_id} not found")

        # Check if user can uninstall
        if plugin.installed_by_user_id != user_id:
            # Check if user has admin permissions
            user_stmt = select(User).where(User.id == user_id)
            user_result = await db.execute(user_stmt)
            user = user_result.scalar_one_or_none()

            if not user:
                raise PluginError(f"User {user_id} not found")

            # Check if user is admin (hasattr guard tolerates User models
            # without an is_admin attribute).
            if not (hasattr(user, 'is_admin') and user.is_admin):
                raise PluginError("Insufficient permissions to uninstall plugin. Only plugin owner or admin can uninstall.")

            logger.info(f"Admin user {user_id} uninstalling plugin {plugin_id} installed by {plugin.installed_by_user_id}")

        # Unload plugin if running
        if plugin_id in plugin_loader.loaded_plugins:
            await plugin_loader.unload_plugin(plugin_id)

        # Backup data if requested
        backup_path = None
        if keep_data:
            backup_path = await plugin_db_manager.backup_plugin_data(plugin_id)

        # Delete database schema if not keeping data
        if not keep_data:
            await plugin_db_manager.delete_plugin_schema(plugin_id)

        # Remove plugin files
        plugin_dir = self.plugins_dir / plugin_id
        if plugin_dir.exists():
            shutil.rmtree(plugin_dir)

        # Update database (soft delete: status flip, row retained)
        plugin.status = "uninstalled"
        plugin.updated_at = datetime.now(timezone.utc)

        # Log uninstall.
        # NOTE(review): if backup_plugin_data returns a Path it may not be
        # JSON-serializable for the details column — confirm it returns str.
        audit_log = PluginAuditLog(
            plugin_id=plugin_id,
            user_id=user_id,
            action="uninstall",
            details={
                "keep_data": keep_data,
                "backup_path": backup_path
            }
        )
        db.add(audit_log)
        await db.commit()

        logger.info(f"Plugin {plugin_id} uninstalled successfully")

        return {
            "status": "uninstalled",
            "plugin_id": plugin_id,
            "backup_path": backup_path,
            "data_kept": keep_data
        }

    except Exception as e:
        await db.rollback()
        logger.error(f"Plugin uninstall failed: {e}")
        raise PluginError(f"Uninstall failed: {e}")
|
||||||
|
|
||||||
|
def _find_manifest(self, plugin_dir: Path) -> Optional[Path]:
    """Locate a plugin's manifest.yaml.

    Checks the plugin directory root first, then falls back to scanning
    each immediate subdirectory (one level deep, to cope with archives
    that nest the plugin inside a single folder).

    Returns:
        Path to the manifest file, or None when no manifest exists.
    """
    candidate = plugin_dir / "manifest.yaml"
    if candidate.exists():
        return candidate

    # Fall back to one-level-deep search inside subdirectories.
    for entry in plugin_dir.iterdir():
        if not entry.is_dir():
            continue
        nested = entry / "manifest.yaml"
        if nested.exists():
            return nested

    return None
|
||||||
|
|
||||||
|
async def _install_new_plugin(self, temp_dir: Path, manifest,
                              user_id: str, db: AsyncSession) -> Dict[str, Any]:
    """Install a plugin that is not yet present on this instance.

    Copies the extracted plugin files into the plugins directory, records
    the plugin in the database, provisions its database schema and
    migration environment, and writes an audit-log entry.

    Args:
        temp_dir: Directory holding the validated, extracted plugin files.
        manifest: Parsed plugin manifest (provides name/version/author metadata).
        user_id: ID of the user performing the installation.
        db: Async database session used for the plugin and audit records.

    Returns:
        Summary dict: status, plugin_id, version and new_installation flag.
    """
    plugin_id = manifest.metadata.name

    # Create a clean plugin directory (remove any stale leftovers first)
    plugin_dir = self.plugins_dir / plugin_id
    if plugin_dir.exists():
        shutil.rmtree(plugin_dir)

    # Copy plugin files into place
    shutil.copytree(temp_dir, plugin_dir)

    # Create database record
    plugin = Plugin(
        id=plugin_id,
        name=manifest.metadata.name,
        version=manifest.metadata.version,
        description=manifest.metadata.description,
        author=manifest.metadata.author,
        manifest_data=manifest.dict(),
        status="installed",
        installed_by_user_id=user_id,
        plugin_dir=str(plugin_dir)
    )

    db.add(plugin)
    # BUG FIX: AsyncSession.flush() and .commit() are coroutines and must
    # be awaited — the sibling uninstall/update paths already await them;
    # without the await the statements never actually execute.
    await db.flush()  # Get plugin ID

    # Create database schema
    await plugin_db_manager.create_plugin_schema(plugin_id, manifest.dict())

    # Create migration environment
    await plugin_migration_manager.create_migration_environment(plugin_id, plugin_dir)

    # Log installation
    audit_log = PluginAuditLog(
        plugin_id=plugin_id,
        user_id=user_id,
        action="install",
        details={
            "version": manifest.metadata.version,
            "source": "file_upload"
        }
    )
    db.add(audit_log)
    await db.commit()

    logger.info(f"New plugin {plugin_id} v{manifest.metadata.version} installed")

    return {
        "status": "installed",
        "plugin_id": plugin_id,
        "version": manifest.metadata.version,
        "new_installation": True
    }
|
||||||
|
|
||||||
|
async def _update_existing_plugin(self, existing_plugin: Plugin, temp_dir: Path,
                                  manifest, user_id: str, db: AsyncSession) -> Dict[str, Any]:
    """Update an already-installed plugin to a new version.

    Backs up the current files, swaps in the new ones, runs migrations and
    updates the database record. On any failure the file backup is
    restored and the database session rolled back.

    Args:
        existing_plugin: Current Plugin database record to update in place.
        temp_dir: Directory holding the new, extracted plugin files.
        manifest: Parsed manifest of the new version.
        user_id: ID of the user performing the update.
        db: Async database session.

    Returns:
        Summary dict with status, plugin id and old/new version.

    Raises:
        PluginError: if the version is unchanged or the update fails.
    """
    plugin_id = manifest.metadata.name
    old_version = existing_plugin.version
    new_version = manifest.metadata.version

    # Version check: identical version is rejected outright.
    if old_version == new_version:
        raise PluginError(f"Plugin {plugin_id} v{new_version} is already installed")

    # Backup current version so we can roll the files back on failure.
    backup_dir = self.plugins_dir / f"{plugin_id}_backup_{old_version}"
    plugin_dir = self.plugins_dir / plugin_id

    if plugin_dir.exists():
        shutil.copytree(plugin_dir, backup_dir)

    try:
        # Unload plugin if running (must happen before files are replaced)
        if plugin_id in plugin_loader.loaded_plugins:
            await plugin_loader.unload_plugin(plugin_id)

        # Update plugin files: replace the directory wholesale.
        if plugin_dir.exists():
            shutil.rmtree(plugin_dir)
        shutil.copytree(temp_dir, plugin_dir)

        # Run migrations for the new version's schema changes.
        await plugin_migration_manager.run_plugin_migrations(plugin_id, plugin_dir)

        # Update database record in place.
        existing_plugin.version = new_version
        existing_plugin.description = manifest.metadata.description
        existing_plugin.manifest_data = manifest.dict()
        existing_plugin.updated_at = datetime.now(timezone.utc)

        # Log update in the audit trail (records the backup location too).
        audit_log = PluginAuditLog(
            plugin_id=plugin_id,
            user_id=user_id,
            action="update",
            details={
                "old_version": old_version,
                "new_version": new_version,
                "backup_dir": str(backup_dir)
            }
        )
        db.add(audit_log)
        await db.commit()

        # Cleanup backup only after the commit succeeded.
        shutil.rmtree(backup_dir, ignore_errors=True)

        logger.info(f"Plugin {plugin_id} updated from v{old_version} to v{new_version}")

        return {
            "status": "updated",
            "plugin_id": plugin_id,
            "old_version": old_version,
            "new_version": new_version,
            "new_installation": False
        }

    except Exception as e:
        # Restore backup on failure: put the old files back, then drop
        # the backup and roll back any uncommitted DB changes.
        if backup_dir.exists():
            if plugin_dir.exists():
                shutil.rmtree(plugin_dir)
            shutil.copytree(backup_dir, plugin_dir)
            shutil.rmtree(backup_dir, ignore_errors=True)

        await db.rollback()
        raise PluginError(f"Plugin update failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
class PluginDiscoveryService:
    """Handles plugin discovery and marketplace functionality.

    Wraps the remote plugin repository client and augments its results
    with local installation status from the database.
    """

    def __init__(self):
        # Client for the remote plugin repository/marketplace.
        self.repo_client = PluginRepositoryClient()

    async def search_available_plugins(self, query: str = "", tags: List[str] = None,
                                       category: str = None, limit: int = 20, db: AsyncSession = None) -> List[Dict[str, Any]]:
        """Search the repository for available plugins.

        Args:
            query: Free-text search string.
            tags: Optional tag filter passed to the repository.
            category: Currently unused (kept for interface stability).
            limit: Maximum number of results.
            db: Optional session; when given, each result is annotated
                with its local installation status.

        Returns:
            List of plugin dicts (each with a "local_status" key), or an
            empty list on any error.
        """
        try:
            # Search repository
            plugins = await self.repo_client.search_plugins(query, tags, limit)

            # Add local installation status
            if db is not None:
                for plugin in plugins:
                    stmt = select(Plugin).where(Plugin.id == plugin["id"])
                    result = await db.execute(stmt)
                    local_plugin = result.scalar_one_or_none()

                    if local_plugin:
                        plugin["local_status"] = {
                            "installed": True,
                            "version": local_plugin.version,
                            "status": local_plugin.status,
                            "update_available": plugin["version"] != local_plugin.version
                        }
                    else:
                        plugin["local_status"] = {
                            "installed": False,
                            "update_available": False
                        }
            else:
                # If no database session provided, mark all as not installed
                for plugin in plugins:
                    plugin["local_status"] = {
                        "installed": False,
                        "update_available": False
                    }

            return plugins

        except Exception as e:
            logger.error(f"Plugin discovery error: {e}")
            return []

    async def get_installed_plugins(self, user_id: str, db: AsyncSession) -> List[Dict[str, Any]]:
        """Get list of installed plugins for user.

        Per-plugin failures (runtime info, DB stats) are logged and
        skipped so one broken plugin cannot hide the rest.
        """
        try:
            # Get all installed plugins (for now, show all plugins to all users)
            # TODO: Implement proper user-based plugin visibility/permissions
            # NOTE: the previous function-local `from sqlalchemy import select`
            # was redundant — `select` is already imported at module level
            # (search_available_plugins above uses it without a local import).
            stmt = select(Plugin).where(
                Plugin.status.in_(["installed", "enabled", "disabled"])
            )
            result = await db.execute(stmt)
            installed_plugins = result.scalars().all()

            # If no plugins installed, return empty list
            if not installed_plugins:
                return []

            plugin_list = []
            for plugin in installed_plugins:
                try:
                    # Get runtime status safely
                    plugin_id = str(plugin.id)  # Ensure string conversion
                    loaded = plugin_id in plugin_loader.loaded_plugins
                    health_status = {}
                    resource_stats = {}

                    if loaded:
                        try:
                            plugin_instance = plugin_loader.loaded_plugins[plugin_id]
                            health_status = await plugin_instance.health_check()
                            resource_stats = plugin_loader.get_resource_stats(plugin_id)
                        except Exception as e:
                            logger.warning(f"Failed to get runtime info for plugin {plugin_id}: {e}")

                    # Get database stats safely
                    db_stats = {}
                    try:
                        db_stats = await plugin_db_manager.get_plugin_database_stats(plugin_id)
                    except Exception as e:
                        logger.warning(f"Failed to get database stats for plugin {plugin_id}: {e}")

                    # NOTE(review): "updated_at" is read from
                    # plugin.last_updated_at while the update path writes
                    # plugin.updated_at — confirm the model field names.
                    plugin_list.append({
                        "id": plugin_id,
                        "name": plugin.name or "Unknown",
                        "version": plugin.version or "Unknown",
                        "description": plugin.description or "",
                        "author": plugin.author or "Unknown",
                        "status": plugin.status,
                        "loaded": loaded,
                        "health": health_status,
                        "resource_usage": resource_stats,
                        "database_stats": db_stats,
                        "installed_at": plugin.installed_at.isoformat() if plugin.installed_at else None,
                        "updated_at": plugin.last_updated_at.isoformat() if plugin.last_updated_at else None
                    })

                except Exception as e:
                    logger.error(f"Error processing plugin {getattr(plugin, 'id', 'unknown')}: {e}")
                    continue

            return plugin_list

        except Exception as e:
            logger.error(f"Error getting installed plugins: {e}")
            return []

    async def get_plugin_updates(self, db: AsyncSession) -> List[Dict[str, Any]]:
        """Check the repository for newer versions of installed plugins.

        Returns one entry per plugin whose repository version differs from
        the installed one; repository failures for a single plugin are
        logged and skipped.
        """
        try:
            stmt = select(Plugin).where(
                Plugin.status.in_(["installed", "enabled"])
            )
            result = await db.execute(stmt)
            installed_plugins = result.scalars().all()

            updates = []
            for plugin in installed_plugins:
                try:
                    # Check repository for newer version
                    plugin_info = await self.repo_client.get_plugin_info(plugin.id)
                    if plugin_info and plugin_info["version"] != plugin.version:
                        updates.append({
                            "plugin_id": plugin.id,
                            "name": plugin.name,
                            "current_version": plugin.version,
                            "available_version": plugin_info["version"],
                            "description": plugin_info.get("description", ""),
                            "changelog": plugin_info.get("changelog", ""),
                            "update_available": True
                        })

                except Exception as e:
                    logger.warning(f"Failed to check updates for {plugin.id}: {e}")
                    continue

            return updates

        except Exception as e:
            logger.error(f"Error checking plugin updates: {e}")
            return []

    async def get_plugin_categories(self) -> List[Dict[str, Any]]:
        """Get available plugin categories (static list for now)."""
        try:
            # TODO: Implement category discovery from repository
            default_categories = [
                {
                    "id": "integrations",
                    "name": "Integrations",
                    "description": "Third-party service integrations"
                },
                {
                    "id": "ai-tools",
                    "name": "AI Tools",
                    "description": "AI and machine learning tools"
                },
                {
                    "id": "productivity",
                    "name": "Productivity",
                    "description": "Productivity and workflow tools"
                },
                {
                    "id": "analytics",
                    "name": "Analytics",
                    "description": "Data analytics and reporting"
                },
                {
                    "id": "communication",
                    "name": "Communication",
                    "description": "Communication and collaboration tools"
                },
                {
                    "id": "security",
                    "name": "Security",
                    "description": "Security and compliance tools"
                }
            ]

            return default_categories

        except Exception as e:
            logger.error(f"Error getting plugin categories: {e}")
            return []
|
||||||
|
|
||||||
|
|
||||||
|
# Global instances
# Module-level singletons, constructed at import time and shared by the
# rest of the application (e.g. the API layer).
plugin_installer = PluginInstaller()
plugin_discovery = PluginDiscoveryService()
|
||||||
609
backend/app/services/plugin_sandbox.py
Normal file
609
backend/app/services/plugin_sandbox.py
Normal file
@@ -0,0 +1,609 @@
|
|||||||
|
"""
|
||||||
|
Plugin Sandbox Environment
|
||||||
|
Provides secure execution environment for plugins with resource limits and monitoring
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import importlib
|
||||||
|
import importlib.util
|
||||||
|
import resource
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import psutil
|
||||||
|
import asyncio
|
||||||
|
from typing import Dict, Any, Optional, List, Set
|
||||||
|
from pathlib import Path
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.utils.exceptions import SecurityError, PluginError
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SandboxLimits:
    """Resource limits for plugin sandbox.

    Consumed by PluginResourceMonitor (runtime enforcement) and
    PluginSandbox (OS-level rlimits and network-domain checks).
    """
    # Max resident-memory growth in MB; any value <= 0 disables the cap.
    max_memory_mb: int = -1  # No memory limit (-1 = unlimited)
    # CPU usage threshold in percent; exceeding it only logs a warning.
    max_cpu_percent: int = 25
    # Disk quota in MB. NOTE(review): not enforced anywhere in this
    # module — confirm whether enforcement exists elsewhere.
    max_disk_mb: int = 100
    # Rolling one-minute API-call budget.
    max_api_calls_per_minute: int = 100
    # Wall-clock execution budget in seconds.
    max_execution_time_seconds: int = 30
    # RLIMIT_NOFILE value applied while the sandbox is active.
    max_file_descriptors: int = 50
    # Thread count reported in resource stats; not hard-enforced here.
    max_threads: int = 10
    # Domains the plugin may reach; an empty list means "no restrictions"
    # (see PluginSandbox.validate_network_access).
    allowed_domains: Optional[List[str]] = None
    # Timeout for outbound network operations, in seconds.
    network_timeout_seconds: int = 30

    def __post_init__(self):
        # Normalize the mutable default: an explicit/implicit None becomes
        # a fresh empty list so instances never share state.
        if self.allowed_domains is None:
            self.allowed_domains = []
|
||||||
|
|
||||||
|
class PluginImportHook:
    """Custom import hook to restrict plugin imports.

    Imports are matched against an allow list first (allow wins), then a
    block list; anything matching neither is allowed with a warning.
    Matching is done on module-name boundaries (see _matches).
    """

    BLOCKED_MODULES = {
        # Core platform modules
        'app.db', 'app.models', 'app.core', 'app.services',
        'sqlalchemy', 'alembic',

        # Security sensitive
        'subprocess', 'eval', 'exec', 'compile', '__import__',
        'os.system', 'os.popen', 'os.spawn', 'os.fork', 'os.exec',

        # System access
        'socket', 'multiprocessing', 'threading.Thread',
        'ctypes', 'mmap', 'resource', 'gc',

        # File system
        'shutil.rmtree', 'os.remove', 'os.rmdir',

        # Network
        'urllib3', 'requests.Session'
    }

    ALLOWED_MODULES = {
        # Standard library
        'asyncio', 'aiohttp', 'json', 'datetime', 'typing', 'pydantic',
        'logging', 'time', 'uuid', 'hashlib', 'base64', 'pathlib',
        're', 'urllib.parse', 'dataclasses', 'enum', 'collections',
        'itertools', 'functools', 'operator', 'copy', 'string',

        # Math and data
        'math', 'decimal', 'fractions', 'statistics',
        'pandas', 'numpy', 'yaml',

        # HTTP clients
        'httpx', 'aiohttp.ClientSession',

        # Security and auth
        'jwt', 'jose', 'cryptography',

        # Database access for plugins
        'sqlalchemy',

        # Plugin framework
        'app.services.base_plugin', 'app.schemas.plugin_manifest',
        'app.services.plugin_database',  # Plugin database access
        'app.services.plugin_security',  # Plugin security utilities
        'app.utils.exceptions',  # Plugin exception handling
        'fastapi', 'pydantic'
    }

    def __init__(self, plugin_id: str):
        self.plugin_id = plugin_id
        self.logger = get_logger(f"plugin.{plugin_id}.imports")
        # Record of everything the plugin imported (for auditing).
        self.imported_modules: Set[str] = set()

    @staticmethod
    def _matches(name: str, prefix: str) -> bool:
        """True when *name* is *prefix* itself or a submodule of it.

        BUG FIX: plain str.startswith() matched on raw characters, so the
        blocked 'resource' module slipped through via the allowed entry
        're', and e.g. 'jsonpickle' would ride on 'json'. Matching is now
        restricted to module-name boundaries.
        """
        return name == prefix or name.startswith(prefix + ".")

    def validate_import(self, name: str) -> bool:
        """Validate if module import is allowed.

        Returns True when allowed; raises SecurityError for blocked
        modules. Unknown modules are allowed with a warning.
        """

        # Check if module is explicitly allowed first (takes precedence)
        for allowed in self.ALLOWED_MODULES:
            if self._matches(name, allowed):
                self.imported_modules.add(name)
                return True

        # Check if module is explicitly blocked
        for blocked in self.BLOCKED_MODULES:
            if self._matches(name, blocked):
                self.logger.error(f"Blocked import attempt: {name}")
                raise SecurityError(f"Import '{name}' not allowed in plugin environment")

        # Log potentially unsafe imports but allow (with warning)
        self.logger.warning(f"Potentially unsafe import: {name}")
        self.imported_modules.add(name)
        return True

    def get_imported_modules(self) -> List[str]:
        """Get list of modules imported by plugin"""
        return list(self.imported_modules)
|
||||||
|
|
||||||
|
|
||||||
|
class PluginResourceMonitor:
    """Monitors plugin resource usage and enforces limits.

    One monitor is attached per sandboxed plugin. NOTE(review): figures
    are taken from the current process (psutil.Process()), so they are
    process-wide, not per-plugin — confirm this is acceptable.
    """

    def __init__(self, plugin_id: str, limits: SandboxLimits):
        self.plugin_id = plugin_id
        self.limits = limits
        self.logger = get_logger(f"plugin.{plugin_id}.resources")

        # Wall-clock start used for the execution-time limit.
        self.start_time = time.time()
        # Rolling one-minute API-call counter for rate limiting.
        self.api_call_count = 0
        self.api_call_window_start = time.time()

        # Get current process for monitoring
        self.process = psutil.Process()
        # Baseline RSS so the memory cap applies to growth, not absolute use.
        self.initial_memory = self.process.memory_info().rss

    def check_memory_limit(self) -> bool:
        """Check memory growth against the configured cap.

        Returns True when within limits (or limits disabled), False when
        the measurement itself fails. Raises PluginError when the cap is
        exceeded.
        """
        # Memory limits disabled per user request (<= 0 = unlimited)
        if self.limits.max_memory_mb <= 0:
            return True

        try:
            current_memory = self.process.memory_info().rss
            memory_mb = (current_memory - self.initial_memory) / (1024 * 1024)
        except Exception as e:
            # A measurement failure is not a limit violation.
            self.logger.error(f"Memory check failed: {e}")
            return False

        # BUG FIX: the limit-violation PluginError is now raised outside
        # the try block — previously the broad `except Exception` caught
        # it and returned False, silently disabling enforcement.
        if memory_mb > self.limits.max_memory_mb:
            self.logger.error(f"Memory limit exceeded: {memory_mb:.1f}MB > {self.limits.max_memory_mb}MB")
            raise PluginError(f"Plugin {self.plugin_id} exceeded memory limit")

        return True

    def check_cpu_limit(self) -> bool:
        """Check CPU usage; high usage is logged but never fatal."""
        try:
            cpu_percent = self.process.cpu_percent()

            if cpu_percent > self.limits.max_cpu_percent:
                self.logger.warning(f"CPU usage high: {cpu_percent:.1f}% > {self.limits.max_cpu_percent}%")
                # Don't kill plugin immediately, just warn

            return True
        except Exception as e:
            self.logger.error(f"CPU check failed: {e}")
            return False

    def check_execution_time(self) -> bool:
        """Check wall-clock execution time; raises PluginError when over."""
        execution_time = time.time() - self.start_time

        if execution_time > self.limits.max_execution_time_seconds:
            self.logger.error(f"Execution time exceeded: {execution_time:.1f}s > {self.limits.max_execution_time_seconds}s")
            raise PluginError(f"Plugin {self.plugin_id} exceeded execution time limit")

        return True

    def track_api_call(self) -> bool:
        """Track one API call against a rolling one-minute rate limit.

        Raises PluginError when the per-minute budget is exceeded.
        """
        current_time = time.time()

        # Reset counter if window expired
        if current_time - self.api_call_window_start > 60:  # 1 minute window
            self.api_call_count = 0
            self.api_call_window_start = current_time

        self.api_call_count += 1

        if self.api_call_count > self.limits.max_api_calls_per_minute:
            self.logger.error(f"API rate limit exceeded: {self.api_call_count} > {self.limits.max_api_calls_per_minute}/min")
            raise PluginError(f"Plugin {self.plugin_id} exceeded API rate limit")

        return True

    def get_resource_stats(self) -> Dict[str, Any]:
        """Get current resource usage statistics (empty dict on failure)."""
        try:
            memory_info = self.process.memory_info()
            current_memory_mb = (memory_info.rss - self.initial_memory) / (1024 * 1024)
            cpu_percent = self.process.cpu_percent()
            execution_time = time.time() - self.start_time

            return {
                "memory_mb": round(current_memory_mb, 2),
                "memory_limit_mb": "unlimited" if self.limits.max_memory_mb <= 0 else self.limits.max_memory_mb,
                "cpu_percent": round(cpu_percent, 2),
                "cpu_limit_percent": self.limits.max_cpu_percent,
                "execution_time_seconds": round(execution_time, 2),
                "execution_limit_seconds": self.limits.max_execution_time_seconds,
                "api_calls_count": self.api_call_count,
                "api_calls_limit": self.limits.max_api_calls_per_minute,
                "threads_count": threading.active_count(),
                "threads_limit": self.limits.max_threads
            }
        except Exception as e:
            self.logger.error(f"Failed to get resource stats: {e}")
            return {}
|
||||||
|
|
||||||
|
|
||||||
|
class PluginSandbox:
    """Secure sandbox environment for plugin execution.

    Bundles an import hook (module allow/block lists), a resource monitor
    and OS-level rlimits behind a single activate()/deactivate() pair.
    NOTE(review): activation mutates process-global state (sys.modules,
    the builtin __import__, rlimits, os.environ), so only one sandbox
    should be active at a time — confirm callers guarantee this.
    """

    def __init__(self, plugin_id: str, plugin_dir: Path, limits: SandboxLimits = None):
        self.plugin_id = plugin_id
        self.plugin_dir = plugin_dir
        # Fall back to default limits when none are supplied.
        self.limits = limits or SandboxLimits()
        self.logger = get_logger(f"plugin.{plugin_id}.sandbox")

        # Initialize components
        self.import_hook = PluginImportHook(plugin_id)
        self.resource_monitor = PluginResourceMonitor(plugin_id, self.limits)

        # Sandbox state
        self.active = False
        # Snapshot of sys.modules taken at activation; used on
        # deactivation to strip any modules the plugin added.
        self.original_modules = None
        self.sandbox_modules = {}

    @contextmanager
    def activate(self):
        """Activate sandbox environment for plugin execution.

        Context manager: snapshots sys.modules, applies rlimits, installs
        the restricted import hook and sets environment flags, then
        yields self. The sandbox is always deactivated on exit, whether
        the body succeeded or raised.
        """
        if self.active:
            raise PluginError(f"Sandbox already active for plugin {self.plugin_id}")

        self.logger.info(f"Activating sandbox for plugin {self.plugin_id}")

        try:
            # Store original state
            self.original_modules = sys.modules.copy()

            # Apply resource limits
            self._apply_resource_limits()

            # Install import hook
            self._install_import_hook()

            # Set sandbox environment variables
            self._setup_environment()

            self.active = True
            yield self

        except Exception as e:
            self.logger.error(f"Sandbox activation failed: {e}")
            raise
        finally:
            # Runs on normal exit, error, or generator close.
            self.deactivate()

    def deactivate(self):
        """Deactivate sandbox and restore original environment.

        Idempotent: a no-op when the sandbox is not active. Errors during
        restoration are logged, not raised.
        """
        if not self.active:
            return

        self.logger.info(f"Deactivating sandbox for plugin {self.plugin_id}")

        try:
            # Restore original modules
            if self.original_modules:
                # Remove plugin modules: anything not present in the
                # activation-time snapshot.
                modules_to_remove = []
                for module_name in sys.modules:
                    if module_name not in self.original_modules:
                        modules_to_remove.append(module_name)

                for module_name in modules_to_remove:
                    del sys.modules[module_name]

            # Remove import hook
            self._remove_import_hook()

            # Reset resource limits
            self._reset_resource_limits()

            self.active = False

        except Exception as e:
            self.logger.error(f"Sandbox deactivation failed: {e}")

    def _apply_resource_limits(self):
        """Apply resource limits using system resources (setrlimit).

        Best effort: failures are logged as warnings since some platforms
        refuse rlimit changes.
        """
        try:
            # Skip memory limits if disabled (per user request)
            if self.limits.max_memory_mb > 0:
                memory_bytes = self.limits.max_memory_mb * 1024 * 1024
                resource.setrlimit(resource.RLIMIT_AS, (memory_bytes, memory_bytes))
                self.logger.debug(f"Applied memory limit: {self.limits.max_memory_mb}MB")
            else:
                self.logger.debug("Memory limits disabled per user configuration")

            # Set file descriptor limit
            resource.setrlimit(resource.RLIMIT_NOFILE, (self.limits.max_file_descriptors, self.limits.max_file_descriptors))

            self.logger.debug(f"Applied resource limits: memory={'unlimited' if self.limits.max_memory_mb <= 0 else f'{self.limits.max_memory_mb}MB'}, fds={self.limits.max_file_descriptors}")

        except Exception as e:
            self.logger.warning(f"Failed to apply some resource limits: {e}")

    def _reset_resource_limits(self):
        """Reset resource limits to system defaults."""
        try:
            # Reset to system limits (usually unlimited)
            resource.setrlimit(resource.RLIMIT_AS, (-1, -1))
            resource.setrlimit(resource.RLIMIT_NOFILE, (1024, 1024))  # Conservative default

        except Exception as e:
            self.logger.warning(f"Failed to reset resource limits: {e}")

    def _install_import_hook(self):
        """Install custom import hook for plugin.

        Saves the current builtin __import__ and replaces it with a
        wrapper that consults PluginImportHook.validate_import (which
        raises SecurityError for blocked modules) before delegating.
        """
        # Handle both dict and module forms of __builtins__
        if isinstance(__builtins__, dict):
            self.original_import = __builtins__['__import__']
        else:
            self.original_import = __builtins__.__import__

        def restricted_import(name, globals=None, locals=None, fromlist=(), level=0):
            # Validate import (may raise for blocked modules)
            self.import_hook.validate_import(name)

            # Call original import
            return self.original_import(name, globals, locals, fromlist, level)

        # Replace __import__ (handle both dict and module forms)
        if isinstance(__builtins__, dict):
            __builtins__['__import__'] = restricted_import
        else:
            __builtins__.__import__ = restricted_import

    def _remove_import_hook(self):
        """Remove custom import hook, restoring the saved __import__."""
        if hasattr(self, 'original_import'):
            # Handle both dict and module forms of __builtins__
            if isinstance(__builtins__, dict):
                __builtins__['__import__'] = self.original_import
            else:
                __builtins__.__import__ = self.original_import

    def _setup_environment(self):
        """Setup sandbox environment variables for the plugin process."""
        # Restrict plugin to its directory
        os.environ[f'PLUGIN_{self.plugin_id.upper()}_DIR'] = str(self.plugin_dir)

        # Set security flags
        os.environ[f'PLUGIN_{self.plugin_id.upper()}_SANDBOX'] = 'true'

        # Disable certain features
        os.environ['PYTHONDONTWRITEBYTECODE'] = '1'  # Don't write .pyc files

    def validate_network_access(self, domain: str) -> bool:
        """Validate if plugin can access external domain.

        An empty allow-list means unrestricted access. Entries starting
        with '*' act as suffix wildcards (e.g. '*.example.com').
        """
        if not self.limits.allowed_domains:
            return True  # No restrictions

        for allowed_domain in self.limits.allowed_domains:
            if allowed_domain.startswith('*'):
                # Wildcard matching
                pattern = allowed_domain[1:]  # Remove *
                if domain.endswith(pattern):
                    return True
            elif domain == allowed_domain:
                return True

        self.logger.error(f"Network access denied to domain: {domain}")
        return False

    def check_resource_usage(self) -> Dict[str, Any]:
        """Run the monitor's limit checks and return current usage stats."""
        self.resource_monitor.check_memory_limit()
        self.resource_monitor.check_cpu_limit()
        self.resource_monitor.check_execution_time()

        return self.resource_monitor.get_resource_stats()

    def track_api_call(self) -> bool:
        """Track API call for rate limiting (delegates to the monitor)."""
        return self.resource_monitor.track_api_call()
|
||||||
|
|
||||||
|
|
||||||
|
class EnhancedPluginLoader:
|
||||||
|
"""Enhanced plugin loader with comprehensive sandboxing"""
|
||||||
|
|
||||||
|
def __init__(self):
    # Runtime registry of live plugin instances, keyed by plugin id.
    self.loaded_plugins: Dict[str, Any] = {}
    # One sandbox wrapper per loaded plugin, keyed by plugin id.
    self.plugin_sandboxes: Dict[str, PluginSandbox] = {}
    self.logger = get_logger("plugin.loader")
|
||||||
|
|
||||||
|
async def load_plugin_with_sandbox(self, plugin_dir: Path, plugin_token: str,
                                   sandbox_limits: SandboxLimits = None) -> Any:
    """Load plugin in secure sandbox environment.

    Validates the manifest and platform compatibility, builds a sandbox
    (using manifest-declared allowed domains unless explicit limits are
    given), then loads and initializes the plugin inside it.

    Args:
        plugin_dir: Directory containing manifest.yaml and main.py.
        plugin_token: Auth token handed to the plugin instance.
        sandbox_limits: Optional explicit limits overriding the defaults.

    Returns:
        The initialized plugin instance.

    Raises:
        PluginError: on invalid manifest, incompatibility, or load failure.
    """

    # Import validation functions here to avoid circular imports
    from app.schemas.plugin_manifest import validate_manifest_file
    # NOTE(review): BasePlugin appears unused in this method (it is used
    # by _load_plugin_module, which does its own import) — confirm.
    from app.services.base_plugin import BasePlugin

    plugin_dir = Path(plugin_dir)

    # Load and validate manifest
    manifest_path = plugin_dir / "manifest.yaml"
    validation_result = validate_manifest_file(manifest_path)

    if not validation_result["valid"]:
        raise PluginError(f"Invalid plugin manifest: {validation_result['errors']}")

    manifest = validation_result["manifest"]
    plugin_id = manifest.metadata.name

    # Check compatibility
    compatibility = validation_result["compatibility"]
    if not compatibility["compatible"]:
        raise PluginError(f"Plugin incompatible: {compatibility['errors']}")

    # Create sandbox with custom limits if specified
    if sandbox_limits is None:
        # Use manifest limits if available
        sandbox_limits = SandboxLimits(
            allowed_domains=manifest.spec.external_services.allowed_domains if manifest.spec.external_services else []
        )

    sandbox = PluginSandbox(plugin_id, plugin_dir, sandbox_limits)
    self.plugin_sandboxes[plugin_id] = sandbox

    try:
        # Load plugin in sandbox
        # NOTE(review): the sandbox's activate() deactivates itself when
        # this `with` block exits, so later calls into the plugin run
        # without the import hook / rlimits active — confirm intended.
        with sandbox.activate():
            plugin_instance = await self._load_plugin_module(plugin_dir, manifest, plugin_token)

            # Initialize plugin
            await plugin_instance.initialize()
            plugin_instance.initialized = True

            self.loaded_plugins[plugin_id] = plugin_instance
            self.logger.info(f"Plugin {plugin_id} loaded successfully in sandbox")

            return plugin_instance

    except Exception as e:
        # Cleanup on failure: drop the half-created sandbox entry.
        if plugin_id in self.plugin_sandboxes:
            del self.plugin_sandboxes[plugin_id]
        raise PluginError(f"Failed to load plugin {plugin_id}: {e}")
|
||||||
|
|
||||||
|
async def _load_plugin_module(self, plugin_dir: Path, manifest, plugin_token: str):
    """Load plugin module with security validation.

    Scans main.py for dangerous patterns, imports it under a synthetic
    module name, locates the BasePlugin subclass and instantiates it.

    Args:
        plugin_dir: Directory containing the plugin's main.py.
        manifest: Parsed plugin manifest (provides the plugin name).
        plugin_token: Auth token passed to the plugin constructor.

    Returns:
        A new instance of the plugin's BasePlugin subclass.

    Raises:
        PluginError: when the module cannot be loaded or executed, or no
            BasePlugin subclass is found.
    """

    # Import here to avoid circular imports
    from app.services.base_plugin import BasePlugin

    # Validate plugin code security
    main_py_path = plugin_dir / "main.py"
    self._validate_plugin_security(main_py_path)

    # Load module
    spec = importlib.util.spec_from_file_location(
        f"plugin_{manifest.metadata.name}",
        main_py_path
    )

    if not spec or not spec.loader:
        raise PluginError(f"Cannot load plugin module: {main_py_path}")

    plugin_module = importlib.util.module_from_spec(spec)

    # Add to sys.modules to allow imports
    sys.modules[spec.name] = plugin_module

    try:
        spec.loader.exec_module(plugin_module)
    except Exception as e:
        # BUG FIX: drop the half-initialized module so a failed load does
        # not leave a stale entry in sys.modules for later retries.
        sys.modules.pop(spec.name, None)
        raise PluginError(f"Failed to execute plugin module: {e}") from e

    # Find plugin class: first BasePlugin subclass defined in the module.
    plugin_class = None
    for attr_name in dir(plugin_module):
        attr = getattr(plugin_module, attr_name)
        if (isinstance(attr, type) and
                issubclass(attr, BasePlugin) and
                attr is not BasePlugin):
            plugin_class = attr
            break

    if not plugin_class:
        # BUG FIX: also clean up sys.modules on this failure path.
        sys.modules.pop(spec.name, None)
        raise PluginError("Plugin must contain a class inheriting from BasePlugin")

    # Instantiate plugin
    return plugin_class(manifest, plugin_token)
|
||||||
|
|
||||||
|
def _validate_plugin_security(self, main_py_path: Path):
    """Static security screen of a plugin's ``main.py`` before it is imported.

    Two passes over the source text:
      1. substring scan for known-dangerous constructs (eval/exec, subprocess,
         direct DB/session access, raw sockets, threads, ...);
      2. per-line validation of every top-level ``import``/``from`` statement
         through ``PluginImportHook``.

    Args:
        main_py_path: Path to the plugin's entry-point source file.

    Raises:
        SecurityError: on the first dangerous pattern or disallowed import.

    NOTE(review): the substring scan also matches occurrences inside string
    literals and comments, so benign code can be rejected — confirm this
    strictness is intended.
    """
    with open(main_py_path, 'r', encoding='utf-8') as f:
        code_content = f.read()

    # Patterns that enable code execution, process control, sandbox escape,
    # or direct access to platform internals.
    dangerous_patterns = [
        'eval(', 'exec(', 'compile(',
        'subprocess.', 'os.system', 'os.popen', 'os.spawn',
        '__import__', 'importlib.import_module',
        'from app.db', 'from app.models', 'from app.core',
        'SessionLocal',  # Allow sqlalchemy but block direct SessionLocal access
        'socket.', 'multiprocessing.',
        'ctypes.', 'mmap.',
        'shutil.rmtree', 'os.remove',
        'resource.', 'gc.',
        'threading.Thread('
    ]

    for pattern in dangerous_patterns:
        if pattern in code_content:
            raise SecurityError(f"Dangerous pattern detected in plugin code: {pattern}")

    # Collect top-level import statements for policy validation.
    import_lines = [line for line in code_content.split('\n') if line.strip().startswith(('import ', 'from '))]

    # Hoisted out of the loop: the hook takes only a name and exposes
    # validate_import, so one instance can validate every line (the original
    # constructed a fresh hook per import line).
    hook = PluginImportHook("security_check")

    for line in import_lines:
        stripped = line.strip()  # strip once instead of per-branch
        # Extract the top-level module name from either import form.
        if stripped.startswith('import '):
            module = stripped[7:].split()[0].split('.')[0]
        elif stripped.startswith('from '):
            module = stripped[5:].split()[0].split('.')[0]
        else:
            continue

        # Validate against security manager policy.
        try:
            hook.validate_import(module)
        except SecurityError as e:
            raise SecurityError(f"Security validation failed: {e}")
|
||||||
|
|
||||||
|
async def unload_plugin(self, plugin_id: str) -> bool:
    """Tear down a loaded plugin: run its cleanup, deactivate and drop its
    sandbox, and remove it from the loaded-plugin registry.

    Returns True on success, False when the plugin is not loaded or any
    teardown step raises (the error is logged, not propagated).
    """
    plugin = self.loaded_plugins.get(plugin_id)
    if plugin is None:
        return False

    try:
        # Give the plugin a chance to release its own resources first.
        await plugin.cleanup()

        # Deactivate the sandbox before forgetting it.
        if plugin_id in self.plugin_sandboxes:
            self.plugin_sandboxes[plugin_id].deactivate()
            del self.plugin_sandboxes[plugin_id]

        del self.loaded_plugins[plugin_id]

        self.logger.info(f"Plugin {plugin_id} unloaded successfully")
        return True
    except Exception as e:
        self.logger.error(f"Error unloading plugin {plugin_id}: {e}")
        return False
|
||||||
|
|
||||||
|
def get_plugin_sandbox(self, plugin_id: str) -> Optional[PluginSandbox]:
    """Return the sandbox registered for *plugin_id*, or None when the plugin
    has no active sandbox."""
    sandbox = self.plugin_sandboxes.get(plugin_id)
    return sandbox
|
||||||
|
|
||||||
|
def get_resource_stats(self, plugin_id: str) -> Dict[str, Any]:
    """Return current resource-usage statistics for *plugin_id*.

    Delegates to the plugin's sandbox; yields an empty dict when the plugin
    has no sandbox.
    """
    sandbox = self.get_plugin_sandbox(plugin_id)
    return sandbox.check_resource_usage() if sandbox else {}
|
||||||
|
|
||||||
|
def list_loaded_plugins(self) -> List[Dict[str, Any]]:
    """Describe every loaded plugin: id, version, initialization state,
    sandbox activity flag and resource usage."""

    def describe(plugin_id, plugin):
        # Resource stats are only meaningful when a sandbox exists.
        sandbox = self.get_plugin_sandbox(plugin_id)
        return {
            "plugin_id": plugin_id,
            "version": plugin.version,
            "initialized": plugin.initialized,
            "sandbox_active": sandbox.active if sandbox else False,
            "resource_usage": self.get_resource_stats(plugin_id) if sandbox else {},
        }

    return [describe(pid, plug) for pid, plug in self.loaded_plugins.items()]
|
||||||
|
|
||||||
|
|
||||||
|
# Global plugin loader instance.
# Module-level singleton: import `plugin_loader` instead of constructing
# EnhancedPluginLoader yourself, so all callers share one registry of
# loaded plugins and sandboxes.
plugin_loader = EnhancedPluginLoader()
|
||||||
481
backend/app/services/plugin_schemas.py
Normal file
481
backend/app/services/plugin_schemas.py
Normal file
@@ -0,0 +1,481 @@
|
|||||||
|
"""
|
||||||
|
Plugin Configuration Schemas
|
||||||
|
Defines the configuration schemas for different plugins in the system.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Any, List, Optional
|
||||||
|
|
||||||
|
|
||||||
|
def get_zammad_configuration_schema() -> Dict[str, Any]:
    """
    Returns the configuration schema for the Zammad Integration plugin.
    Based on the existing Zammad module implementation from enclava-jo.

    The schema is a JSON-Schema-like dict extended with UI hints
    ("field_groups" for form layout, "depends_on" for conditional fields,
    "validation" for a connection-test endpoint).
    """
    return {
        "type": "object",
        "title": "Zammad Integration Configuration",
        "description": "Configure AI-powered ticket summarization for Zammad ticketing system",
        "properties": {
            # Basic Settings
            "name": {
                "type": "string", "title": "Configuration Name",
                "description": "A descriptive name for this configuration",
                "required": True, "placeholder": "My Zammad Instance"
            },
            "description": {
                "type": "string", "title": "Description",
                "description": "Optional description of this configuration",
                "required": False,
                "placeholder": "Production Zammad instance for customer support"
            },
            "is_default": {
                "type": "boolean", "title": "Default Configuration",
                "description": "Set as the default configuration for processing",
                "default": False, "required": False
            },

            # Zammad Connection Settings
            "zammad_url": {
                "type": "url", "title": "Zammad URL",
                "description": "The base URL of your Zammad instance",
                "required": True,
                "placeholder": "https://your-zammad.example.com",
                "pattern": "^https?://.+"
            },
            "api_token": {
                "type": "password", "title": "API Token",
                "description": "Your Zammad API access token (will be encrypted)",
                "required": True, "placeholder": "Your Zammad API token"
            },

            # AI Integration
            "chatbot_id": {
                "type": "select", "title": "AI Chatbot",
                "description": "Select the chatbot to use for generating ticket summaries",
                "required": True,
                "options": [],  # Will be populated dynamically from available chatbots
                "placeholder": "Select a chatbot"
            },

            # Processing Settings
            "process_state": {
                "type": "select", "title": "Ticket State to Process",
                "description": "Which ticket state should be processed",
                "required": False, "default": "open",
                "options": [
                    {"value": "open", "label": "Open"},
                    {"value": "pending", "label": "Pending"},
                    {"value": "closed", "label": "Closed"},
                    {"value": "all", "label": "All States"}
                ]
            },
            "max_tickets": {
                "type": "integer", "title": "Max Tickets Per Run",
                "description": "Maximum number of tickets to process in a single batch",
                "required": False, "default": 10,
                "minimum": 1, "maximum": 100
            },
            "skip_existing": {
                "type": "boolean", "title": "Skip Already Processed",
                "description": "Skip tickets that already have AI summaries",
                "default": True, "required": False
            },

            # Automation Settings
            "auto_process": {
                "type": "boolean", "title": "Enable Auto Processing",
                "description": "Automatically process new tickets at regular intervals",
                "default": False, "required": False
            },
            "process_interval": {
                "type": "integer", "title": "Processing Interval (minutes)",
                "description": "How often to automatically process tickets (only if auto-process is enabled)",
                "required": False, "default": 30,
                "minimum": 5, "maximum": 1440,
                # Only shown/used when auto_process is enabled.
                "depends_on": {"field": "auto_process", "value": True}
            },

            # Customization
            "summary_template": {
                "type": "textarea", "title": "AI Summary Template",
                "description": "Custom template for generating AI summaries. Leave empty for default.",
                "required": False,
                "placeholder": "Generate a concise summary of this support ticket including key issues, customer concerns, and any actions taken.",
                "rows": 3
            }
        },
        "required": ["name", "zammad_url", "api_token", "chatbot_id"],
        # UI layout: which fields render together as form sections.
        "field_groups": [
            {"title": "Basic Information", "fields": ["name", "description", "is_default"]},
            {"title": "Zammad Connection", "fields": ["zammad_url", "api_token"]},
            {"title": "AI Configuration", "fields": ["chatbot_id", "summary_template"]},
            {"title": "Processing Settings", "fields": ["process_state", "max_tickets", "skip_existing"]},
            {"title": "Automation", "fields": ["auto_process", "process_interval"]}
        ],
        # Backend endpoint the UI can call to test the entered credentials.
        "validation": {
            "connection_test": {
                "endpoint": "/api/v1/zammad/test-connection",
                "method": "POST",
                "fields": ["zammad_url", "api_token"],
                "success_message": "Connection to Zammad successful",
                "error_field": "Connection failed"
            }
        }
    }
|
||||||
|
|
||||||
|
|
||||||
|
def get_signal_configuration_schema() -> Dict[str, Any]:
    """
    Returns the configuration schema for the Signal Bot plugin.
    Based on the existing Signal module implementation.

    Every field except the master toggle carries a "depends_on"
    pointing at enable_signal_bot, so the form collapses when the bot
    is disabled.
    """
    return {
        "type": "object",
        "title": "Signal Bot Configuration",
        "description": "Configure AI-powered Signal messaging bot with role-based permissions",
        "properties": {
            # Basic Settings
            "enable_signal_bot": {
                "type": "boolean", "title": "Enable Signal Bot",
                "description": "Turn the Signal bot on/off",
                "default": False, "required": True
            },
            "signal_service_url": {
                "type": "url", "title": "Signal Service URL",
                "description": "Signal service endpoint (e.g., signal-cli-rest-api)",
                "required": True, "placeholder": "http://localhost:8080",
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "bot_phone_number": {
                "type": "string", "title": "Bot Phone Number",
                "description": "Registered Signal phone number for the bot",
                "required": True, "placeholder": "+1234567890",
                # E.164 phone-number format.
                "pattern": "^\\+[1-9]\\d{1,14}$",
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },

            # AI Settings
            "model": {
                "type": "select", "title": "AI Model",
                "description": "Choose the AI model for responses",
                "required": False, "default": "privatemode-llama-3-70b",
                "options": [],  # Will be populated from available models
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "temperature": {
                "type": "number", "title": "Response Creativity",
                "description": "Control response creativity (0.0-1.0)",
                "required": False, "default": 0.7,
                "minimum": 0.0, "maximum": 1.0,
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "max_tokens": {
                "type": "integer", "title": "Max Response Length",
                "description": "Maximum tokens in AI responses",
                "required": False, "default": 500,
                "minimum": 50, "maximum": 2000,
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "memory_length": {
                "type": "integer", "title": "Conversation Memory",
                "description": "Number of message pairs to remember per user",
                "required": False, "default": 10,
                "minimum": 1, "maximum": 50,
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },

            # Permission Settings
            "default_role": {
                "type": "select", "title": "Default User Role",
                "description": "Role assigned to new Signal users",
                "required": False, "default": "user",
                "options": [
                    {"value": "admin", "label": "Admin"},
                    {"value": "user", "label": "User"},
                    {"value": "disabled", "label": "Disabled"}
                ],
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "auto_register": {
                "type": "boolean", "title": "Auto-Register New Users",
                "description": "Automatically register new Signal users",
                "default": True, "required": False,
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "admin_phone_numbers": {
                "type": "textarea", "title": "Admin Phone Numbers",
                "description": "Phone numbers with admin privileges (one per line)",
                "required": False,
                "placeholder": "+1234567890\n+0987654321",
                "rows": 3,
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },

            # Bot Behavior
            "command_prefix": {
                "type": "string", "title": "Command Prefix",
                "description": "Prefix for bot commands",
                "required": False, "default": "!", "placeholder": "!",
                "depends_on": {"field": "enable_signal_bot", "value": True}
            },
            "log_conversations": {
                "type": "boolean", "title": "Log Conversations",
                "description": "Enable conversation logging for analytics",
                "default": False, "required": False,
                "depends_on": {"field": "enable_signal_bot", "value": True}
            }
        },
        "required": ["enable_signal_bot"],
        # UI layout: form sections.
        "field_groups": [
            {"title": "Basic Settings", "fields": ["enable_signal_bot", "signal_service_url", "bot_phone_number"]},
            {"title": "AI Configuration", "fields": ["model", "temperature", "max_tokens", "memory_length"]},
            {"title": "Permission Settings", "fields": ["default_role", "auto_register", "admin_phone_numbers"]},
            {"title": "Bot Behavior", "fields": ["command_prefix", "log_conversations"]}
        ],
        # Backend endpoint the UI can call to test the Signal service.
        "validation": {
            "signal_test": {
                "endpoint": "/api/v1/signal/test-connection",
                "method": "POST",
                "fields": ["signal_service_url", "bot_phone_number"],
                "success_message": "Signal service connection successful",
                "error_field": "Signal connection failed"
            }
        }
    }
|
||||||
|
|
||||||
|
|
||||||
|
def get_email_assistant_configuration_schema() -> Dict[str, Any]:
    """
    Returns the configuration schema for the Email Assistant plugin.

    Same extended JSON-Schema shape as the other plugin schemas; note this
    one has no "validation" connection-test section.
    """
    return {
        "type": "object",
        "title": "Email Assistant Configuration",
        "description": "Configure AI-powered email management and auto-response system",
        "properties": {
            # Basic Settings
            "enable_email_assistant": {
                "type": "boolean", "title": "Enable Email Assistant",
                "description": "Turn the email assistant on/off",
                "default": False, "required": True
            },
            "email_provider": {
                "type": "select", "title": "Email Provider",
                "description": "Select your email service provider",
                "required": True,
                "options": [
                    {"value": "gmail", "label": "Gmail"},
                    {"value": "outlook", "label": "Outlook/Hotmail"},
                    {"value": "imap", "label": "Generic IMAP"},
                    {"value": "exchange", "label": "Exchange Server"}
                ],
                "depends_on": {"field": "enable_email_assistant", "value": True}
            },
            "email_address": {
                "type": "email", "title": "Email Address",
                "description": "Your email address for the assistant to monitor",
                "required": True, "placeholder": "your-email@example.com",
                "depends_on": {"field": "enable_email_assistant", "value": True}
            },

            # AI Configuration
            "auto_response_enabled": {
                "type": "boolean", "title": "Enable Auto-Response",
                "description": "Automatically respond to incoming emails",
                "default": False, "required": False,
                "depends_on": {"field": "enable_email_assistant", "value": True}
            },
            "response_template": {
                "type": "textarea", "title": "Auto-Response Template",
                "description": "Template for automatic responses",
                "required": False,
                "placeholder": "Thank you for your email. I'll respond within 24 hours.",
                "rows": 3,
                # Nested dependency: only relevant once auto-response is on.
                "depends_on": {"field": "auto_response_enabled", "value": True}
            },

            # Processing Settings
            "check_interval": {
                "type": "integer", "title": "Check Interval (minutes)",
                "description": "How often to check for new emails",
                "required": False, "default": 15,
                "minimum": 1, "maximum": 1440,
                "depends_on": {"field": "enable_email_assistant", "value": True}
            }
        },
        "required": ["enable_email_assistant"],
        # UI layout: form sections.
        "field_groups": [
            {"title": "Basic Settings", "fields": ["enable_email_assistant", "email_provider", "email_address"]},
            {"title": "Auto-Response", "fields": ["auto_response_enabled", "response_template"]},
            {"title": "Processing Settings", "fields": ["check_interval"]}
        ]
    }
|
||||||
|
|
||||||
|
|
||||||
|
def get_plugin_configuration_schema(plugin_id: str) -> Optional[Dict[str, Any]]:
    """
    Get the configuration schema for a specific plugin.

    Args:
        plugin_id: The ID of the plugin

    Returns:
        The configuration schema dictionary or None if not found
    """
    # Registry mapping plugin ids to their schema-builder functions.
    registry = {
        "zammad": get_zammad_configuration_schema,
        "signal": get_signal_configuration_schema,
        "email-assistant": get_email_assistant_configuration_schema,
    }
    builder = registry.get(plugin_id)
    if builder is None:
        return None
    return builder()
|
||||||
|
|
||||||
|
|
||||||
719
backend/app/services/plugin_security.py
Normal file
719
backend/app/services/plugin_security.py
Normal file
@@ -0,0 +1,719 @@
|
|||||||
|
"""
|
||||||
|
Plugin Security and Authentication Service
|
||||||
|
Handles plugin tokens, permissions, and security policies
|
||||||
|
"""
|
||||||
|
import jwt
|
||||||
|
import hashlib
|
||||||
|
import secrets
|
||||||
|
import time
|
||||||
|
import redis
|
||||||
|
from typing import Dict, Any, List, Optional, Set, Tuple
|
||||||
|
from datetime import datetime, timezone, timedelta
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from cryptography.fernet import Fernet
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
from app.models.plugin import Plugin, PluginConfiguration, PluginAuditLog, PluginPermission
|
||||||
|
from app.models.user import User
|
||||||
|
from app.models.api_key import APIKey
|
||||||
|
from app.db.database import get_db
|
||||||
|
from app.utils.exceptions import SecurityError, PluginError
|
||||||
|
|
||||||
|
|
||||||
|
logger = get_logger("plugin.security")
|
||||||
|
|
||||||
|
|
||||||
|
class PluginTokenManager:
|
||||||
|
"""Manages plugin authentication tokens"""
|
||||||
|
|
||||||
|
def __init__(self):
    # JWT signing key shared with the rest of the platform.
    self.secret_key = settings.JWT_SECRET
    # Fernet key for encrypting plugin secrets at rest (env var in
    # production, generated key file in development).
    self.encryption_key = self._get_or_create_encryption_key()
    self.cipher_suite = Fernet(self.encryption_key)

    # Initialize Redis connection for token blacklist; verified with a ping
    # so a dead Redis is detected at construction time.
    try:
        self.redis_client = redis.from_url(settings.REDIS_URL, decode_responses=True)
        # Test connection
        self.redis_client.ping()
    except Exception as e:
        # Degraded mode: with redis_client None, revocation checks are
        # skipped and revocation writes fail (see the revoke/_is_* methods).
        logger.error(f"Failed to connect to Redis for token blacklist: {e}")
        self.redis_client = None
|
||||||
|
|
||||||
|
def _get_or_create_encryption_key(self) -> bytes:
    """Get or create encryption key for plugin secrets.

    Resolution order:
      1. ``settings.PLUGIN_ENCRYPTION_KEY`` (base64-encoded; production path).
      2. Development fallback: read or generate ``/data/plugin_keys/encryption.key``.

    Returns:
        Raw Fernet key bytes.

    Raises:
        SecurityError: if the env key is malformed or the key file cannot
            be read/written.
    """
    # First, try to get from environment variable (production)
    if settings.PLUGIN_ENCRYPTION_KEY:
        try:
            # Environment variable should contain base64-encoded key
            import base64
            return base64.b64decode(settings.PLUGIN_ENCRYPTION_KEY.encode())
        except Exception as e:
            logger.error(f"Invalid PLUGIN_ENCRYPTION_KEY in environment: {e}")
            raise SecurityError("Invalid encryption key configuration")

    # Development fallback: generate and store in data directory
    # NOTE(review): key file is created with default permissions — confirm
    # it should be chmod'd to owner-only in shared environments.
    data_dir = Path("/data/plugin_keys")
    data_dir.mkdir(parents=True, exist_ok=True)
    key_file = data_dir / "encryption.key"

    try:
        if key_file.exists():
            return key_file.read_bytes()
        else:
            # Generate new key for development
            key = Fernet.generate_key()
            key_file.write_bytes(key)

            # Log the base64 form so it can be promoted to the env var.
            import base64
            logger.warning(
                f"Generated new plugin encryption key for development. "
                f"For production, set PLUGIN_ENCRYPTION_KEY environment variable to: "
                f"{base64.b64encode(key).decode()}"
            )
            return key
    except Exception as e:
        logger.error(f"Failed to manage encryption key: {e}")
        raise SecurityError(f"Encryption key management failed: {e}")
|
||||||
|
|
||||||
|
def generate_plugin_token(self, plugin_id: str, user_id: str,
                          permissions: List[str], expires_hours: int = 24) -> str:
    """Mint a signed HS256 JWT authenticating *plugin_id* on behalf of *user_id*.

    The token carries the granted permission list, audience/issuer claims
    checked by verify_plugin_token, and a random ``jti`` so an individual
    token can later be revoked.

    Raises:
        SecurityError: if signing fails for any reason.
    """
    try:
        issued_at = datetime.now(timezone.utc)
        claims = {
            'sub': user_id,
            'plugin_id': plugin_id,
            'permissions': permissions,
            'iat': int(issued_at.timestamp()),
            'exp': int((issued_at + timedelta(hours=expires_hours)).timestamp()),
            'aud': 'enclava-plugin',
            'iss': 'enclava-platform',
            'jti': secrets.token_urlsafe(16)  # JWT ID for revocation
        }

        signed = jwt.encode(claims, self.secret_key, algorithm='HS256')

        logger.info(f"Generated plugin token for {plugin_id} (user: {user_id})")
        return signed

    except Exception as e:
        logger.error(f"Failed to generate plugin token: {e}")
        raise SecurityError(f"Token generation failed: {e}")
|
||||||
|
|
||||||
|
def verify_plugin_token(self, token: str) -> Tuple[bool, Optional[Dict[str, Any]]]:
    """Verify and decode plugin token.

    Checks, in order: JWT signature/expiry/audience/issuer, presence of the
    required claims, per-token revocation (jti blacklist), then plugin- or
    user-level revocation markers.

    Returns:
        ``(True, payload)`` when valid, ``(False, None)`` otherwise — this
        method never raises to the caller.
    """
    try:
        # Signature + exp/aud/iss are enforced by jwt.decode itself.
        payload = jwt.decode(
            token,
            self.secret_key,
            algorithms=['HS256'],
            audience='enclava-plugin',
            issuer='enclava-platform'
        )

        # Additional validation: both identity claims must be present.
        if 'plugin_id' not in payload or 'sub' not in payload:
            return False, None

        # Check if specific token is revoked (jti blacklist in Redis).
        if self._is_token_revoked(payload.get('jti')):
            return False, None

        # Check if plugin/user tokens are revoked (bulk revocation markers).
        plugin_id = payload.get('plugin_id')
        user_id = payload.get('sub')
        if self._is_plugin_user_revoked(plugin_id, user_id):
            return False, None

        return True, payload

    except jwt.InvalidTokenError as e:
        # Expected failure mode (expired/forged/wrong-audience tokens).
        logger.warning(f"Invalid plugin token: {e}")
        return False, None
    except Exception as e:
        logger.error(f"Token verification failed: {e}")
        return False, None
|
||||||
|
|
||||||
|
def _is_token_revoked(self, jti: str) -> bool:
    """Check if token is revoked using Redis blacklist.

    Returns True only when the token's ``jti`` is found in the blacklist;
    returns False when jti is missing, Redis is unavailable, or the lookup
    errors.
    """
    if not jti or not self.redis_client:
        return False

    try:
        # Check if token JTI exists in blacklist
        blacklist_key = f"plugin_token_blacklist:{jti}"
        is_revoked = self.redis_client.exists(blacklist_key)

        if is_revoked:
            logger.debug(f"Token {jti} found in blacklist")
            return True

        return False

    except Exception as e:
        logger.error(f"Failed to check token blacklist: {e}")
        # Fail OPEN: if the blacklist cannot be checked, the token is treated
        # as NOT revoked so a Redis outage doesn't disrupt service. This
        # trades security for availability — a revoked token stays usable
        # while Redis is down. (The original comment said "fail secure",
        # which this is not.)
        return False
|
||||||
|
|
||||||
|
def revoke_plugin_tokens(self, plugin_id: str, user_id: Optional[str] = None) -> bool:
    """Revoke every outstanding token for a plugin, optionally scoped to one user.

    Writes a revocation marker to Redis that verify_plugin_token consults;
    the marker expires after 7 days (the maximum token lifetime), so tokens
    issued before the marker are dead for their whole remaining life.

    Returns:
        True on success, False when Redis is unavailable or the write fails.
    """
    try:
        if not self.redis_client:
            logger.error("Redis not available for token revocation")
            return False

        # Marker key covers the whole plugin, or only one user of it.
        pieces = [f"plugin_revocation:{plugin_id}"]
        if user_id:
            pieces.append(f":user:{user_id}")
        revocation_key = "".join(pieces)

        # Marker lives as long as the longest possible token: 7 days.
        seven_days = 7 * 24 * 60 * 60
        self.redis_client.setex(revocation_key, seven_days, int(time.time()))

        logger.info(f"Revoked plugin tokens for {plugin_id} (user: {user_id})")
        return True

    except Exception as e:
        logger.error(f"Failed to revoke tokens: {e}")
        return False
|
||||||
|
|
||||||
|
def revoke_specific_token(self, jti: str, expires_at: datetime) -> bool:
    """Blacklist a single token (by its JWT ID) until its natural expiry.

    The blacklist entry's TTL matches the token's remaining lifetime, so
    Redis garbage-collects it automatically. An already-expired token is
    considered successfully revoked without touching Redis.

    Returns:
        True on success, False when jti is missing, Redis is unavailable,
        or the write fails.
    """
    try:
        if not jti or not self.redis_client:
            logger.error("Cannot revoke token: missing JTI or Redis unavailable")
            return False

        now = datetime.now(timezone.utc)
        if expires_at <= now:
            # Token already expired — nothing to blacklist.
            return True

        remaining = int((expires_at - now).total_seconds())

        # TTL on the blacklist key mirrors the token's remaining validity.
        self.redis_client.setex(f"plugin_token_blacklist:{jti}", remaining, int(time.time()))

        logger.info(f"Revoked token {jti}, expires in {remaining} seconds")
        return True

    except Exception as e:
        logger.error(f"Failed to revoke specific token {jti}: {e}")
        return False
|
||||||
|
|
||||||
|
def cleanup_expired_revocations(self) -> int:
    """Report how many token-blacklist entries currently exist.

    Redis TTLs already purge expired entries automatically, so no actual
    deletion happens here — this exists for manual inspection/statistics.

    Returns:
        Count of blacklisted tokens, or 0 when Redis is unavailable or
        the scan fails.
    """
    if not self.redis_client:
        return 0

    try:
        # Count current blacklisted tokens.
        entries = self.redis_client.keys("plugin_token_blacklist:*")
        blacklisted_count = len(entries)

        logger.debug(f"Current blacklisted tokens: {blacklisted_count}")
        return blacklisted_count

    except Exception as e:
        logger.error(f"Failed to cleanup revocations: {e}")
        return 0
|
||||||
|
|
||||||
|
def _is_plugin_user_revoked(self, plugin_id: str, user_id: str) -> bool:
|
||||||
|
"""Check if all tokens for a plugin/user combination are revoked"""
|
||||||
|
if not plugin_id or not user_id or not self.redis_client:
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Check plugin-level revocation
|
||||||
|
plugin_revocation_key = f"plugin_revocation:{plugin_id}"
|
||||||
|
if self.redis_client.exists(plugin_revocation_key):
|
||||||
|
logger.debug(f"Plugin {plugin_id} tokens are revoked")
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check user-specific revocation for this plugin
|
||||||
|
user_revocation_key = f"plugin_revocation:{plugin_id}:user:{user_id}"
|
||||||
|
if self.redis_client.exists(user_revocation_key):
|
||||||
|
logger.debug(f"Plugin {plugin_id} tokens revoked for user {user_id}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to check plugin/user revocation: {e}")
|
||||||
|
# Fail secure - if we can't check, assume not revoked
|
||||||
|
return False
|
||||||
|
|
||||||
|
def encrypt_plugin_secret(self, secret: str) -> str:
    """Encrypt a plugin secret and return it as a storable string.

    The plaintext is UTF-8 encoded, encrypted with the instance's cipher
    suite, and the ciphertext decoded back to str for storage.
    Raises SecurityError when encryption fails for any reason.
    """
    try:
        ciphertext = self.cipher_suite.encrypt(secret.encode())
        return ciphertext.decode()
    except Exception as e:
        logger.error(f"Failed to encrypt secret: {e}")
        raise SecurityError("Secret encryption failed")
|
||||||
|
|
||||||
|
def decrypt_plugin_secret(self, encrypted_secret: str) -> str:
    """Decrypt a stored plugin secret back to plaintext.

    Inverse of encrypt_plugin_secret(): the stored string is encoded to
    bytes, decrypted, and decoded back to str.
    Raises SecurityError when decryption fails for any reason.
    """
    try:
        plaintext = self.cipher_suite.decrypt(encrypted_secret.encode())
        return plaintext.decode()
    except Exception as e:
        logger.error(f"Failed to decrypt secret: {e}")
        raise SecurityError("Secret decryption failed")
|
||||||
|
|
||||||
|
def get_revocation_status(self, plugin_id: str, user_id: Optional[str] = None) -> Dict[str, Any]:
    """Describe the revocation state of a plugin (and optionally a user).

    Returns a dict with `plugin_revoked` / `user_revoked` flags and the
    stored revocation timestamp (the user-level timestamp wins when both
    exist, matching lookup order), or an error-shaped dict when Redis is
    unreachable or the lookup fails.
    """
    try:
        if not self.redis_client:
            return {"status": "unknown", "error": "Redis unavailable"}

        report: Dict[str, Any] = {
            "plugin_id": plugin_id,
            "user_id": user_id,
            "plugin_revoked": False,
            "user_revoked": False,
            "revoked_at": None,
        }

        def _apply(key: str, flag: str) -> None:
            # Set the flag and capture the revocation timestamp stored
            # as the key's value (epoch seconds).
            if self.redis_client.exists(key):
                report[flag] = True
                stamp = self.redis_client.get(key)
                if stamp:
                    report["revoked_at"] = int(stamp)

        # Plugin-wide revocation first, then the user-specific entry so
        # its timestamp overrides when both are present.
        _apply(f"plugin_revocation:{plugin_id}", "plugin_revoked")
        if user_id:
            _apply(f"plugin_revocation:{plugin_id}:user:{user_id}", "user_revoked")

        return report

    except Exception as e:
        logger.error(f"Failed to get revocation status: {e}")
        return {"status": "error", "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
|
class PluginPermissionManager:
    """Manages plugin permissions and access control.

    Effective permissions for a (user, plugin) pair are assembled in
    _load_user_permissions() from four sources: base plugin-scope
    permissions (when the user has an active PluginConfiguration), platform
    APIs declared in the plugin manifest, explicit non-expired
    PluginPermission grants, and admin permissions for admin users.
    Results are cached in-process per "user_id:plugin_id" key; the cache is
    invalidated only by grant_permission()/revoke_permission().
    """

    # Platform API permissions a plugin may request, mapped to a
    # human-readable description (used for validation and display).
    PLATFORM_API_PERMISSIONS = {
        'chatbot:invoke': 'Invoke chatbot conversations',
        'chatbot:manage': 'Manage chatbot instances',
        'chatbot:read': 'Read chatbot configurations',

        'rag:query': 'Query RAG collections',
        'rag:manage': 'Manage RAG collections and documents',
        'rag:read': 'Read RAG collection metadata',

        'llm:completion': 'Generate LLM completions',
        'llm:embeddings': 'Generate text embeddings',
        'llm:models': 'List available LLM models',

        'workflow:execute': 'Execute workflow processes',
        'workflow:read': 'Read workflow definitions',

        'cache:read': 'Read cached data',
        'cache:write': 'Write cached data',

        'user:read': 'Read user profile data',
        'user:settings': 'Access user settings',

        'admin:users': 'Manage users (admin only)',
        'admin:system': 'System administration (admin only)'
    }

    # Permissions scoped to the plugin's own data and lifecycle.
    PLUGIN_SCOPE_PERMISSIONS = {
        'read': 'Read plugin data',
        'write': 'Modify plugin data',
        'config': 'Manage plugin configuration',
        'install': 'Install/uninstall plugin',
        'execute': 'Execute plugin functions'
    }

    def __init__(self):
        # Resolved permission sets keyed by "user_id:plugin_id".
        # NOTE: no TTL — entries live until explicitly invalidated by
        # grant_permission()/revoke_permission().
        self.permission_cache: Dict[str, Set[str]] = {}

    def validate_permissions(self, requested_permissions: List[str]) -> Tuple[bool, List[str]]:
        """Validate requested permissions against allowed permissions.

        Accepts exact permission names from either catalog, plus wildcard
        forms like "rag:*" when at least one concrete "rag:..." permission
        exists. Returns (all_valid, invalid_permissions).
        """
        valid_permissions = set(self.PLATFORM_API_PERMISSIONS.keys()) | set(self.PLUGIN_SCOPE_PERMISSIONS.keys())

        invalid_permissions = []
        for permission in requested_permissions:
            if permission.endswith(':*'):
                # Wildcard permission - check if base exists
                # (i.e. some concrete "<base>:..." permission is defined).
                base_permission = permission[:-2]
                if not any(p.startswith(base_permission + ':') for p in valid_permissions):
                    invalid_permissions.append(permission)
            elif permission not in valid_permissions:
                invalid_permissions.append(permission)

        return len(invalid_permissions) == 0, invalid_permissions

    def check_permission(self, user_id: str, plugin_id: str,
                        permission: str, db: Session) -> bool:
        """Check if user has permission for plugin action.

        Loads (and caches) the user's permission set on first use, then
        matches the exact permission or a held "<category>:*" wildcard.
        Any error results in a denied check (returns False).
        """
        try:
            # Get user permissions from cache or database
            cache_key = f"{user_id}:{plugin_id}"
            if cache_key not in self.permission_cache:
                self._load_user_permissions(user_id, plugin_id, db)

            user_permissions = self.permission_cache.get(cache_key, set())

            # Check exact permission match
            if permission in user_permissions:
                return True

            # Check wildcard permissions: a held "category:*" grants any
            # two-part "category:action" permission.
            permission_parts = permission.split(':')
            if len(permission_parts) == 2:
                wildcard_permission = f"{permission_parts[0]}:*"
                if wildcard_permission in user_permissions:
                    return True

            return False

        except Exception as e:
            logger.error(f"Permission check failed: {e}")
            return False

    def _load_user_permissions(self, user_id: str, plugin_id: str, db: Session):
        """Load user permissions for plugin from database.

        Populates self.permission_cache["user_id:plugin_id"] by merging:
        all plugin-scope permissions (when an active PluginConfiguration
        exists for this user/plugin), platform APIs listed in the plugin
        manifest, explicit non-expired PluginPermission grants, and admin
        permissions when the user is an admin. Errors are logged and leave
        the cache entry unset.
        """
        try:
            # Get user
            user = db.query(User).filter(User.id == user_id).first()
            if not user:
                return

            # Get plugin configuration
            config = db.query(PluginConfiguration).filter(
                PluginConfiguration.user_id == user_id,
                PluginConfiguration.plugin_id == plugin_id,
                PluginConfiguration.is_active == True
            ).first()

            permissions = set()

            # Add base plugin permissions
            # NOTE(review): an active configuration grants ALL plugin-scope
            # permissions (read/write/config/install/execute) — confirm this
            # breadth is intended.
            if config:
                permissions.update(self.PLUGIN_SCOPE_PERMISSIONS.keys())

            # Add platform API permissions based on plugin manifest
            plugin = db.query(Plugin).filter(Plugin.id == plugin_id).first()
            if plugin and plugin.manifest_data:
                manifest_permissions = plugin.manifest_data.get('spec', {}).get('permissions', {}).get('platform_apis', [])
                permissions.update(manifest_permissions)

            # Add explicitly granted permissions from database
            from datetime import datetime, timezone
            explicitly_granted = db.query(PluginPermission).filter(
                PluginPermission.plugin_id == plugin_id,
                PluginPermission.user_id == user_id,
                PluginPermission.granted == True
            ).filter(
                # Only include non-expired permissions
                (PluginPermission.expires_at.is_(None)) |
                (PluginPermission.expires_at > datetime.now(timezone.utc))
            ).all()

            for permission_record in explicitly_granted:
                permissions.add(permission_record.permission_name)

            # Add admin permissions if user is admin
            if hasattr(user, 'is_admin') and user.is_admin:
                permissions.update(['admin:users', 'admin:system'])

            # Cache permissions
            cache_key = f"{user_id}:{plugin_id}"
            self.permission_cache[cache_key] = permissions

        except Exception as e:
            logger.error(f"Failed to load user permissions: {e}")

    def get_user_permissions(self, user_id: str, plugin_id: str, db: Session) -> List[str]:
        """Get list of permissions for user and plugin.

        Loads from the database on a cache miss; returns an (unordered)
        list of permission names, empty when nothing could be loaded.
        """
        cache_key = f"{user_id}:{plugin_id}"
        if cache_key not in self.permission_cache:
            self._load_user_permissions(user_id, plugin_id, db)

        return list(self.permission_cache.get(cache_key, set()))

    def grant_permission(self, user_id: str, plugin_id: str,
                        permission: str, granted_by: str, db: Session) -> bool:
        """Grant permission to user for plugin.

        Validates the permission name, inserts a granted PluginPermission
        row plus an audit-log entry in one commit, and invalidates the
        in-process cache for this user/plugin pair. Returns True on
        success; any failure (including an invalid permission name, whose
        SecurityError is caught below) rolls back and returns False.
        """
        try:
            # Validate permission
            valid, invalid = self.validate_permissions([permission])
            if not valid:
                raise SecurityError(f"Invalid permission: {permission}")

            # Store permission grant in database
            permission_record = PluginPermission(
                plugin_id=plugin_id,
                user_id=user_id,
                permission_name=permission,
                granted=True,
                granted_by_user_id=granted_by,
                reason=f"Permission granted by user {granted_by}"
            )

            db.add(permission_record)

            # Invalidate cache to force reload
            cache_key = f"{user_id}:{plugin_id}"
            if cache_key in self.permission_cache:
                del self.permission_cache[cache_key]

            # Log permission grant
            audit_log = PluginAuditLog(
                plugin_id=plugin_id,
                user_id=user_id,
                action="grant_permission",
                details={
                    "permission": permission,
                    "granted_by": granted_by
                }
            )
            db.add(audit_log)
            db.commit()

            return True

        except Exception as e:
            logger.error(f"Failed to grant permission: {e}")
            db.rollback()
            return False

    def revoke_permission(self, user_id: str, plugin_id: str,
                         permission: str, revoked_by: str, db: Session) -> bool:
        """Revoke permission from user for plugin.

        Marks the matching granted PluginPermission row as revoked (if one
        exists — a missing row is only logged), invalidates the cache, and
        writes an audit-log entry. NOTE: the audit entry and commit happen
        even when no matching grant was found. Returns True unless the
        transaction fails, in which case it rolls back and returns False.
        """
        try:
            # Mark permission as revoked in database
            permission_record = db.query(PluginPermission).filter(
                PluginPermission.plugin_id == plugin_id,
                PluginPermission.user_id == user_id,
                PluginPermission.permission_name == permission,
                PluginPermission.granted == True
            ).first()

            if permission_record:
                # Mark as revoked
                permission_record.granted = False
                permission_record.revoked_at = func.now()
                permission_record.revoked_by_user_id = revoked_by
                permission_record.reason = f"Permission revoked by user {revoked_by}"
            else:
                logger.warning(f"Permission {permission} not found for user {user_id}, plugin {plugin_id}")

            # Invalidate cache to force reload
            cache_key = f"{user_id}:{plugin_id}"
            if cache_key in self.permission_cache:
                del self.permission_cache[cache_key]

            # Log permission revocation
            audit_log = PluginAuditLog(
                plugin_id=plugin_id,
                user_id=user_id,
                action="revoke_permission",
                details={
                    "permission": permission,
                    "revoked_by": revoked_by
                }
            )
            db.add(audit_log)
            db.commit()

            return True

        except Exception as e:
            logger.error(f"Failed to revoke permission: {e}")
            db.rollback()
            return False
|
||||||
|
|
||||||
|
|
||||||
|
class PluginSecurityPolicyManager:
    """Manages security policies for plugins.

    Effective policies start from DEFAULT_SECURITY_POLICY and are overridden
    by the plugin manifest (`spec.security_policy`, plus any
    `spec.external_services.allowed_domains`). Resolved policies are cached
    in memory per plugin id.
    """

    # Baseline limits applied to every plugin unless its manifest overrides
    # them. This dict (and the lists inside it) must never be mutated in
    # place — see _default_policy_copy().
    DEFAULT_SECURITY_POLICY = {
        'max_api_calls_per_minute': 100,
        'max_memory_mb': 128,
        'max_cpu_percent': 25,
        'max_disk_mb': 100,
        'max_network_connections': 10,
        'allowed_domains': [],
        'blocked_domains': ['localhost', '127.0.0.1', '0.0.0.0'],
        'require_https': True,
        'allow_file_access': False,
        'allow_system_calls': False,
        'enable_audit_logging': True,
        'token_expires_hours': 24,
        'max_token_lifetime_hours': 168  # 1 week
    }

    def __init__(self):
        # plugin_id -> resolved security policy
        self.policy_cache: Dict[str, Dict[str, Any]] = {}

    def _default_policy_copy(self) -> Dict[str, Any]:
        """Return an independent copy of DEFAULT_SECURITY_POLICY.

        BUG FIX: a plain dict.copy() is shallow, so the 'allowed_domains' /
        'blocked_domains' lists were shared with the class-level default and
        the later list.extend() in get_security_policy() silently mutated
        the defaults for every plugin. Lists are now copied explicitly.
        """
        return {
            key: (list(value) if isinstance(value, list) else value)
            for key, value in self.DEFAULT_SECURITY_POLICY.items()
        }

    def get_security_policy(self, plugin_id: str, db: "Session") -> Dict[str, Any]:
        """Get the effective security policy for a plugin (cached).

        Falls back to an independent copy of the default policy when the
        plugin is unknown or the lookup fails.
        """
        if plugin_id in self.policy_cache:
            return self.policy_cache[plugin_id]

        try:
            plugin = db.query(Plugin).filter(Plugin.id == plugin_id).first()
            if not plugin:
                return self._default_policy_copy()

            # Start with an independent copy of the default policy.
            policy = self._default_policy_copy()

            # Override with plugin manifest settings.
            if plugin.manifest_data:
                manifest_policy = plugin.manifest_data.get('spec', {}).get('security_policy', {})
                policy.update(manifest_policy)

                # Re-copy the list: update() above may have replaced it with
                # the manifest's own list object, which must not be mutated.
                policy['allowed_domains'] = list(policy.get('allowed_domains', []))

                # Add allowed domains from the manifest's external services.
                external_services = plugin.manifest_data.get('spec', {}).get('external_services', {})
                if external_services.get('allowed_domains'):
                    policy['allowed_domains'].extend(external_services['allowed_domains'])

            # Cache the resolved policy for subsequent lookups.
            self.policy_cache[plugin_id] = policy
            return policy

        except Exception as e:
            logger.error(f"Failed to get security policy for {plugin_id}: {e}")
            return self._default_policy_copy()

    def validate_security_policy(self, policy: Dict[str, Any]) -> Tuple[bool, List[str]]:
        """Validate security policy configuration.

        Checks required keys, inclusive numeric ranges, and that
        'allowed_domains' (when present) is a list.
        Returns (is_valid, errors).
        """
        errors = []

        # Check required fields
        required_fields = ['max_api_calls_per_minute', 'max_memory_mb', 'token_expires_hours']
        for field in required_fields:
            if field not in policy:
                errors.append(f"Missing required field: {field}")

        # Validate numeric limits: field -> (min, max), inclusive.
        numeric_limits = {
            'max_api_calls_per_minute': (1, 1000),
            'max_memory_mb': (16, 1024),
            'max_cpu_percent': (1, 100),
            'max_disk_mb': (10, 10240),
            'token_expires_hours': (1, 168)
        }

        for field, (min_val, max_val) in numeric_limits.items():
            if field in policy:
                value = policy[field]
                if not isinstance(value, (int, float)) or value < min_val or value > max_val:
                    errors.append(f"{field} must be between {min_val} and {max_val}")

        # Validate domains
        if 'allowed_domains' in policy:
            if not isinstance(policy['allowed_domains'], list):
                errors.append("allowed_domains must be a list")

        return len(errors) == 0, errors

    def update_security_policy(self, plugin_id: str, policy: Dict[str, Any],
                               updated_by: str, db: "Session") -> bool:
        """Update security policy for plugin.

        Validates the policy, stores it in the in-memory cache (database
        persistence is still TODO), and writes an audit-log entry. Returns
        True on success; any failure (including validation, whose
        SecurityError is caught below) rolls back and returns False.
        """
        try:
            # Validate policy
            valid, errors = self.validate_security_policy(policy)
            if not valid:
                raise SecurityError(f"Invalid security policy: {errors}")

            # TODO: Store policy in database
            # For now, update cache
            self.policy_cache[plugin_id] = policy

            # Log policy update
            audit_log = PluginAuditLog(
                plugin_id=plugin_id,
                action="update_security_policy",
                details={
                    "policy": policy,
                    "updated_by": updated_by
                }
            )
            db.add(audit_log)
            db.commit()

            return True

        except Exception as e:
            logger.error(f"Failed to update security policy for {plugin_id}: {e}")
            db.rollback()
            return False

    def check_policy_compliance(self, plugin_id: str, action: str,
                                context: Dict[str, Any], db: "Session") -> bool:
        """Check if action complies with plugin security policy.

        Known actions: 'api_call' (always allowed for now — rate limiting
        pending), 'network_access' (blocked-domain suffixes deny; when an
        allow-list is configured only its suffixes pass), 'file_access' and
        'system_call' (policy flags). Unknown actions are allowed; errors
        deny.
        """
        try:
            policy = self.get_security_policy(plugin_id, db)

            # Check specific action types
            if action == 'api_call':
                # Check rate limits (would need rate limiter integration)
                return True

            elif action == 'network_access':
                domain = context.get('domain')
                if not domain:
                    return False

                # Check blocked domains (suffix match).
                for blocked in policy.get('blocked_domains', []):
                    if domain.endswith(blocked):
                        return False

                # Check allowed domains if specified (suffix match).
                allowed_domains = policy.get('allowed_domains', [])
                if allowed_domains:
                    return any(domain.endswith(allowed) for allowed in allowed_domains)

                return True

            elif action == 'file_access':
                return policy.get('allow_file_access', False)

            elif action == 'system_call':
                return policy.get('allow_system_calls', False)

            return True

        except Exception as e:
            logger.error(f"Policy compliance check failed: {e}")
            return False
|
||||||
|
|
||||||
|
|
||||||
|
# Global instances
# Module-level singletons shared by the plugin subsystem: import these
# instead of constructing new managers, so the in-process caches
# (permissions, policies) are shared across callers.
plugin_token_manager = PluginTokenManager()
plugin_permission_manager = PluginPermissionManager()
plugin_security_policy_manager = PluginSecurityPolicyManager()
|
||||||
51
frontend/src/app/api/v1/chatbot/list/route.ts
Normal file
51
frontend/src/app/api/v1/chatbot/list/route.ts
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
const BACKEND_URL = process.env.INTERNAL_API_URL || 'http://enclava-backend:8000'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest) {
|
||||||
|
try {
|
||||||
|
const token = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(`${BACKEND_URL}/api/v1/chatbot/list`, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Authorization': token,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.text()
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to fetch chatbots', details: errorData },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
// Transform the response to match expected format for plugin configuration
|
||||||
|
const chatbots = Array.isArray(data) ? data : (data.chatbots || [])
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
chatbots: chatbots.map((chatbot: any) => ({
|
||||||
|
id: chatbot.id,
|
||||||
|
name: chatbot.name || 'Unnamed Chatbot',
|
||||||
|
chatbot_type: chatbot.config?.chatbot_type || 'assistant',
|
||||||
|
description: chatbot.description || '',
|
||||||
|
created_at: chatbot.created_at,
|
||||||
|
is_active: chatbot.is_active
|
||||||
|
}))
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching chatbots:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Internal server error' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
93
frontend/src/app/api/v1/plugins/[pluginId]/config/route.ts
Normal file
93
frontend/src/app/api/v1/plugins/[pluginId]/config/route.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function GET(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins config endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/config`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error getting plugin config:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to get plugin config' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function POST(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins config endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/config`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify(body)
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error saving plugin config:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to save plugin config' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
46
frontend/src/app/api/v1/plugins/[pluginId]/disable/route.ts
Normal file
46
frontend/src/app/api/v1/plugins/[pluginId]/disable/route.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function POST(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins disable endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/disable`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error disabling plugin:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to disable plugin' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
46
frontend/src/app/api/v1/plugins/[pluginId]/enable/route.ts
Normal file
46
frontend/src/app/api/v1/plugins/[pluginId]/enable/route.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function POST(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins enable endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/enable`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error enabling plugin:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to enable plugin' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
46
frontend/src/app/api/v1/plugins/[pluginId]/load/route.ts
Normal file
46
frontend/src/app/api/v1/plugins/[pluginId]/load/route.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function POST(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins load endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/load`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error loading plugin:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to load plugin' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
48
frontend/src/app/api/v1/plugins/[pluginId]/route.ts
Normal file
48
frontend/src/app/api/v1/plugins/[pluginId]/route.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function DELETE(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins uninstall endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'DELETE',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify(body)
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error uninstalling plugin:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to uninstall plugin' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
56
frontend/src/app/api/v1/plugins/[pluginId]/schema/route.ts
Normal file
56
frontend/src/app/api/v1/plugins/[pluginId]/schema/route.ts
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function GET(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins schema endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/schema`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Cache-Control': 'no-cache, no-store, must-revalidate',
|
||||||
|
'Pragma': 'no-cache',
|
||||||
|
'Expires': '0'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextResponse = NextResponse.json(data)
|
||||||
|
|
||||||
|
// Add cache-busting headers to prevent schema caching
|
||||||
|
nextResponse.headers.set('Cache-Control', 'no-cache, no-store, must-revalidate')
|
||||||
|
nextResponse.headers.set('Pragma', 'no-cache')
|
||||||
|
nextResponse.headers.set('Expires', '0')
|
||||||
|
|
||||||
|
return nextResponse
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error getting plugin schema:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to get plugin schema' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function POST(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugin test-credentials endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/test-credentials`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify(body)
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error testing plugin credentials:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to test plugin credentials' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
46
frontend/src/app/api/v1/plugins/[pluginId]/unload/route.ts
Normal file
46
frontend/src/app/api/v1/plugins/[pluginId]/unload/route.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function POST(
|
||||||
|
request: NextRequest,
|
||||||
|
{ params }: { params: { pluginId: string } }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const { pluginId } = params
|
||||||
|
|
||||||
|
// Make request to backend plugins unload endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/${pluginId}/unload`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error unloading plugin:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to unload plugin' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
55
frontend/src/app/api/v1/plugins/discover/route.ts
Normal file
55
frontend/src/app/api/v1/plugins/discover/route.ts
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get query parameters
|
||||||
|
const { searchParams } = new URL(request.url)
|
||||||
|
const query = searchParams.get('query') || ''
|
||||||
|
const tags = searchParams.get('tags') || ''
|
||||||
|
const category = searchParams.get('category') || ''
|
||||||
|
const limit = searchParams.get('limit') || '20'
|
||||||
|
|
||||||
|
// Build query string
|
||||||
|
const queryParams = new URLSearchParams()
|
||||||
|
if (query) queryParams.set('query', query)
|
||||||
|
if (tags) queryParams.set('tags', tags)
|
||||||
|
if (category) queryParams.set('category', category)
|
||||||
|
if (limit) queryParams.set('limit', limit)
|
||||||
|
|
||||||
|
// Make request to backend plugins discover endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/discover?${queryParams.toString()}`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error discovering plugins:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to discover plugins' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
44
frontend/src/app/api/v1/plugins/install/route.ts
Normal file
44
frontend/src/app/api/v1/plugins/install/route.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
|
||||||
|
// Make request to backend plugins install endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/install`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify(body)
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error installing plugin:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to install plugin' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
41
frontend/src/app/api/v1/plugins/installed/route.ts
Normal file
41
frontend/src/app/api/v1/plugins/installed/route.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import { NextRequest, NextResponse } from 'next/server'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest) {
|
||||||
|
try {
|
||||||
|
// Extract authorization header from the incoming request
|
||||||
|
const authHeader = request.headers.get('authorization')
|
||||||
|
|
||||||
|
if (!authHeader) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authorization header required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make request to backend plugins endpoint
|
||||||
|
const baseUrl = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL
|
||||||
|
const url = `${baseUrl}/api/v1/plugins/installed`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Authorization': authHeader,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(data, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error fetching installed plugins:', error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Failed to fetch installed plugins' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
19
frontend/src/app/plugins/page.tsx
Normal file
19
frontend/src/app/plugins/page.tsx
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
"use client"
|
||||||
|
|
||||||
|
import React from 'react';
|
||||||
|
import { PluginManager } from '@/components/plugins/PluginManager';
|
||||||
|
|
||||||
|
export default function PluginsPage() {
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<div>
|
||||||
|
<h1 className="text-3xl font-bold tracking-tight">Plugin Manager</h1>
|
||||||
|
<p className="text-muted-foreground">
|
||||||
|
Discover, install, and manage plugins to extend platform functionality
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<PluginManager />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
902
frontend/src/components/plugins/PluginConfigurationDialog.tsx
Normal file
902
frontend/src/components/plugins/PluginConfigurationDialog.tsx
Normal file
@@ -0,0 +1,902 @@
|
|||||||
|
/**
|
||||||
|
* Plugin Configuration Dialog - Configuration interface for plugins
|
||||||
|
*/
|
||||||
|
"use client"
|
||||||
|
|
||||||
|
import React, { useState, useEffect } from 'react';
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
import { Input } from '@/components/ui/input';
|
||||||
|
import { Label } from '@/components/ui/label';
|
||||||
|
import { Textarea } from '@/components/ui/textarea';
|
||||||
|
import { Switch } from '@/components/ui/switch';
|
||||||
|
import { Alert, AlertDescription } from '@/components/ui/alert';
|
||||||
|
import {
|
||||||
|
Dialog,
|
||||||
|
DialogContent,
|
||||||
|
DialogDescription,
|
||||||
|
DialogHeader,
|
||||||
|
DialogTitle,
|
||||||
|
DialogFooter
|
||||||
|
} from '@/components/ui/dialog';
|
||||||
|
import {
|
||||||
|
Select,
|
||||||
|
SelectContent,
|
||||||
|
SelectItem,
|
||||||
|
SelectTrigger,
|
||||||
|
SelectValue,
|
||||||
|
} from '@/components/ui/select';
|
||||||
|
import {
|
||||||
|
Settings,
|
||||||
|
Save,
|
||||||
|
RotateCw,
|
||||||
|
AlertCircle,
|
||||||
|
CheckCircle,
|
||||||
|
Info,
|
||||||
|
Eye,
|
||||||
|
EyeOff
|
||||||
|
} from 'lucide-react';
|
||||||
|
import { usePlugin, type PluginInfo, type PluginConfiguration } from '../../contexts/PluginContext';
|
||||||
|
|
||||||
|
/** Props for the plugin configuration dialog. */
interface PluginConfigurationDialogProps {
  // Plugin whose configuration is being viewed/edited.
  plugin: PluginInfo;
  // Controlled open state of the dialog.
  open: boolean;
  // Callback fired when the dialog requests an open/close state change.
  onOpenChange: (open: boolean) => void;
}
|
||||||
|
|
||||||
|
/**
 * Shape of a single schema-driven configuration form field.
 * NOTE(review): this local type appears unused by the visible rendering code,
 * which works with `any`-typed schema entries — confirm before relying on it.
 */
interface FormField {
  // Configuration key this field writes to.
  key: string;
  // Field kind, e.g. 'string' | 'boolean' | 'select' | 'number' — drives widget choice.
  type: string;
  // Human-readable label shown next to the widget.
  label: string;
  // Optional helper text rendered under the widget.
  description?: string;
  // Whether the field must be filled before saving.
  required?: boolean;
  // Default value used when no saved configuration exists.
  default?: any;
  // Choices for select-style fields; plain strings or value/label pairs.
  options?: string[] | { value: string; label: string }[];
  // Optional numeric/pattern constraints.
  validation?: {
    min?: number;
    max?: number;
    pattern?: string;
  };
}
|
||||||
|
|
||||||
|
export const PluginConfigurationDialog: React.FC<PluginConfigurationDialogProps> = ({
|
||||||
|
plugin,
|
||||||
|
open,
|
||||||
|
onOpenChange
|
||||||
|
}) => {
|
||||||
|
const {
|
||||||
|
getPluginConfiguration,
|
||||||
|
savePluginConfiguration,
|
||||||
|
getPluginSchema,
|
||||||
|
pluginConfigurations
|
||||||
|
} = usePlugin();
|
||||||
|
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [saving, setSaving] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [success, setSuccess] = useState(false);
|
||||||
|
const [schema, setSchema] = useState<any>(null);
|
||||||
|
const [config, setConfig] = useState<Record<string, any>>({});
|
||||||
|
const [formValues, setFormValues] = useState<Record<string, any>>({});
|
||||||
|
const [testingConnection, setTestingConnection] = useState(false);
|
||||||
|
const [testingCredentials, setTestingCredentials] = useState(false);
|
||||||
|
const [credentialsTestResult, setCredentialsTestResult] = useState<{
|
||||||
|
success: boolean;
|
||||||
|
message: string;
|
||||||
|
} | null>(null);
|
||||||
|
const [showApiToken, setShowApiToken] = useState(false);
|
||||||
|
|
||||||
|
// Load configuration and schema when dialog opens
|
||||||
|
useEffect(() => {
|
||||||
|
if (open && plugin.id) {
|
||||||
|
loadPluginData();
|
||||||
|
}
|
||||||
|
}, [open, plugin.id]);
|
||||||
|
|
||||||
|
// Reset success message after 5 seconds (extended for better visibility)
|
||||||
|
useEffect(() => {
|
||||||
|
if (success) {
|
||||||
|
const timer = setTimeout(() => setSuccess(false), 5000);
|
||||||
|
return () => clearTimeout(timer);
|
||||||
|
}
|
||||||
|
}, [success]);
|
||||||
|
|
||||||
|
const loadPluginData = async () => {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Load schema and current configuration
|
||||||
|
const [schemaData, configData] = await Promise.all([
|
||||||
|
getPluginSchema(plugin.id),
|
||||||
|
getPluginConfiguration(plugin.id)
|
||||||
|
]);
|
||||||
|
|
||||||
|
setSchema(schemaData);
|
||||||
|
setConfig(configData?.configuration || {});
|
||||||
|
|
||||||
|
// Initialize form values with current config or defaults
|
||||||
|
const initialValues: Record<string, any> = {};
|
||||||
|
if (schemaData?.properties) {
|
||||||
|
Object.entries(schemaData.properties).forEach(([key, field]: [string, any]) => {
|
||||||
|
initialValues[key] = configData?.configuration?.[key] ?? field.default ?? '';
|
||||||
|
});
|
||||||
|
}
|
||||||
|
setFormValues(initialValues);
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to load configuration');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
  // Persist the current form values via the PluginContext, preserving a
  // previously-saved API token when the user leaves the token field blank
  // (the backend never echoes secrets back, so blank means "unchanged").
  const handleSave = async () => {
    setSaving(true);
    setError(null);

    try {
      // Prepare config data - if API token is empty and we have existing config, preserve existing token
      const configToSave = { ...formValues };

      // If api_token is empty/missing and we have existing config with a token, preserve it
      if ((!configToSave.api_token || configToSave.api_token.trim() === '') && config.api_token) {
        configToSave.api_token = config.api_token; // Keep existing token
      }

      const success = await savePluginConfiguration(plugin.id, configToSave);

      if (success) {
        // Keep local state in sync with what was actually persisted.
        setConfig(configToSave);
        setSuccess(true);

        // Auto-close dialog after successful save (after a brief delay to show success message)
        setTimeout(() => {
          onOpenChange(false);
        }, 2000);
      } else {
        setError('Failed to save configuration');
      }
    } catch (err) {
      const errorMsg = err instanceof Error ? err.message : 'Failed to save configuration';
      setError(errorMsg);
    } finally {
      setSaving(false);
    }
  };
|
||||||
|
|
||||||
|
const handleFieldChange = (key: string, value: any) => {
|
||||||
|
setFormValues(prev => ({ ...prev, [key]: value }));
|
||||||
|
setSuccess(false); // Clear success message when editing
|
||||||
|
};
|
||||||
|
|
||||||
|
const shouldShowField = (key: string, field: any) => {
|
||||||
|
// Check if field has conditional visibility
|
||||||
|
if (field.depends_on) {
|
||||||
|
const dependsOnField = field.depends_on.field;
|
||||||
|
const dependsOnValue = field.depends_on.value;
|
||||||
|
const currentValue = formValues[dependsOnField];
|
||||||
|
|
||||||
|
return currentValue === dependsOnValue;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
|
||||||
|
  // Run the schema-declared connection test, if any: POSTs the listed form
  // fields to the endpoint named in schema.validation.connection_test and
  // surfaces the outcome via the shared success/error state.
  const handleTestConnection = async () => {
    // No-op when the plugin's schema declares no connection test.
    if (!schema?.validation?.connection_test) return;

    const testConfig = schema.validation.connection_test;
    const testData: Record<string, any> = {};

    // Collect required fields for testing
    testConfig.fields.forEach((fieldKey: string) => {
      testData[fieldKey] = formValues[fieldKey];
    });

    setTestingConnection(true);
    setError(null);

    try {
      // Endpoint and method come from the schema, not from this component.
      const response = await fetch(testConfig.endpoint, {
        method: testConfig.method,
        headers: {
          'Content-Type': 'application/json',
          // NOTE(review): reads the auth token straight from localStorage —
          // confirm this matches how the rest of the app stores the session.
          'Authorization': `Bearer ${localStorage.getItem('token')}`,
        },
        body: JSON.stringify(testData),
      });

      const result = await response.json();

      if (response.ok && result.status === 'success') {
        // Reuse the save-success banner briefly to signal a passing test.
        setSuccess(true);
        setError(null);
        setTimeout(() => setSuccess(false), 3000);
      } else {
        setError(result.message || testConfig.error_field || 'Connection test failed');
      }
    } catch (err) {
      setError(`Connection test error: ${err instanceof Error ? err.message : 'Unknown error'}`);
    } finally {
      setTestingConnection(false);
    }
  };
|
||||||
|
|
||||||
|
  // Verify the entered Zammad URL + API token against the plugin's
  // test-credentials endpoint and report the outcome in-dialog.
  // NOTE(review): this handler is hard-coded to Zammad field names
  // (zammad_url / api_token) inside an otherwise generic dialog — confirm
  // whether it should be schema-driven like handleTestConnection.
  const handleTestCredentials = async () => {
    // Both inputs are required before a test can be attempted.
    if (!formValues.zammad_url || !formValues.api_token) {
      setCredentialsTestResult({
        success: false,
        message: 'Please provide both Zammad URL and API Token'
      });
      return;
    }

    setTestingCredentials(true);
    setCredentialsTestResult(null);

    try {
      // Test credentials using Zammad API test endpoint
      const response = await fetch(`/api/v1/plugins/${plugin.id}/test-credentials`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${localStorage.getItem('token')}`,
        },
        body: JSON.stringify({
          zammad_url: formValues.zammad_url,
          api_token: formValues.api_token
        }),
      });

      const result = await response.json();

      if (response.ok && result.success) {
        setCredentialsTestResult({
          success: true,
          message: result.message || 'Credentials verified successfully!'
        });
        // Auto-hide success message after 3 seconds
        setTimeout(() => setCredentialsTestResult(null), 3000);
      } else {
        setCredentialsTestResult({
          success: false,
          message: result.message || result.error || 'Credential test failed'
        });
      }
    } catch (err) {
      setCredentialsTestResult({
        success: false,
        message: `Test failed: ${err instanceof Error ? err.message : 'Network error'}`
      });
    } finally {
      setTestingCredentials(false);
    }
  };
|
||||||
|
|
||||||
|
  // Render one widget for a field nested inside an 'object'-typed schema
  // entry. Unlike renderField, the change handler is injected so the parent
  // can merge the value back into the enclosing object.
  const renderNestedField = (fieldId: string, key: string, field: any, value: any, onChange: (value: any) => void) => {
    const fieldType = field.type || 'string';

    switch (fieldType) {
      // Toggle switch for booleans.
      case 'boolean':
        return (
          <div className="space-y-2">
            <div className="flex items-center space-x-2">
              <Switch
                id={fieldId}
                checked={Boolean(value)}
                onCheckedChange={onChange}
              />
              <Label htmlFor={fieldId} className="text-sm font-medium">
                {field.title || field.label || key}
                {field.required && <span className="text-red-500 ml-1">*</span>}
              </Label>
            </div>
            {field.description && (
              <p className="text-xs text-muted-foreground ml-6">
                {field.description}
              </p>
            )}
          </div>
        );

      // Dropdown for enumerated choices; accepts plain values or {value,label}.
      case 'select':
      case 'enum':
        return (
          <div className="space-y-2">
            <Label htmlFor={fieldId} className="text-sm font-medium">
              {field.title || field.label || key}
              {field.required && <span className="text-red-500 ml-1">*</span>}
            </Label>
            <Select value={String(value || '')} onValueChange={onChange}>
              <SelectTrigger>
                <SelectValue placeholder={field.placeholder || `Select ${field.title || key}`} />
              </SelectTrigger>
              <SelectContent>
                {(field.enum || field.options || []).map((option: any) => {
                  const optionValue = typeof option === 'object' ? option.value : option;
                  const optionLabel = typeof option === 'object' ? option.label : option;
                  return (
                    <SelectItem key={optionValue} value={String(optionValue)}>
                      {optionLabel}
                    </SelectItem>
                  );
                })}
              </SelectContent>
            </Select>
            {field.description && (
              <p className="text-xs text-muted-foreground">
                {field.description}
              </p>
            )}
          </div>
        );

      // Numeric input; integers step by 1 and parse with parseInt, floats by 0.1.
      case 'number':
      case 'integer':
        return (
          <div className="space-y-2">
            <Label htmlFor={fieldId} className="text-sm font-medium">
              {field.title || field.label || key}
              {field.required && <span className="text-red-500 ml-1">*</span>}
            </Label>
            <Input
              id={fieldId}
              type="number"
              step={fieldType === 'integer' ? "1" : "0.1"}
              value={String(value || '')}
              onChange={(e) => onChange(fieldType === 'integer' ? parseInt(e.target.value) || field.default || 0 : parseFloat(e.target.value) || field.default || 0)}
              placeholder={field.placeholder || `Enter ${field.title || key}`}
              min={field.minimum}
              max={field.maximum}
            />
            {field.description && (
              <p className="text-xs text-muted-foreground">
                {field.description}
              </p>
            )}
          </div>
        );

      // Fallback text-like input; the HTML input type is derived from the
      // schema type (email/password/url), defaulting to plain text.
      default:
        return (
          <div className="space-y-2">
            <Label htmlFor={fieldId} className="text-sm font-medium">
              {field.title || field.label || key}
              {field.required && <span className="text-red-500 ml-1">*</span>}
            </Label>
            <Input
              id={fieldId}
              type={fieldType === 'email' ? 'email' : fieldType === 'password' ? 'password' : fieldType === 'url' ? 'url' : 'text'}
              value={String(value || '')}
              onChange={(e) => onChange(e.target.value)}
              placeholder={field.placeholder || `Enter ${field.title || key}`}
              pattern={field.pattern}
            />
            {field.description && (
              <p className="text-xs text-muted-foreground">
                {field.description}
              </p>
            )}
          </div>
        );
    }
  };
|
||||||
|
|
||||||
|
const renderField = (key: string, field: any) => {
|
||||||
|
// Check if field should be shown based on dependencies
|
||||||
|
if (!shouldShowField(key, field)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const value = formValues[key] ?? field.default ?? '';
|
||||||
|
const fieldType = field.type || 'string';
|
||||||
|
|
||||||
|
switch (fieldType) {
|
||||||
|
case 'boolean':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
<Switch
|
||||||
|
id={key}
|
||||||
|
checked={Boolean(value)}
|
||||||
|
onCheckedChange={(checked) => handleFieldChange(key, checked)}
|
||||||
|
/>
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
</div>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground ml-6">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'select':
|
||||||
|
case 'enum':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Select value={String(value)} onValueChange={(val) => handleFieldChange(key, val)}>
|
||||||
|
<SelectTrigger>
|
||||||
|
<SelectValue placeholder={field.placeholder || `Select ${field.title || key}`} />
|
||||||
|
</SelectTrigger>
|
||||||
|
<SelectContent>
|
||||||
|
{(field.enum || field.options || []).map((option: any) => {
|
||||||
|
const optionValue = typeof option === 'object' ? option.value : option;
|
||||||
|
const optionLabel = typeof option === 'object' ? option.label : option;
|
||||||
|
return (
|
||||||
|
<SelectItem key={optionValue} value={String(optionValue)}>
|
||||||
|
{optionLabel}
|
||||||
|
</SelectItem>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</SelectContent>
|
||||||
|
</Select>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'textarea':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Textarea
|
||||||
|
id={key}
|
||||||
|
value={String(value)}
|
||||||
|
onChange={(e) => handleFieldChange(key, e.target.value)}
|
||||||
|
placeholder={field.placeholder || `Enter ${field.title || key}`}
|
||||||
|
rows={field.rows || 3}
|
||||||
|
/>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'number':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id={key}
|
||||||
|
type="number"
|
||||||
|
step="0.1"
|
||||||
|
value={String(value)}
|
||||||
|
onChange={(e) => handleFieldChange(key, parseFloat(e.target.value) || field.default || 0)}
|
||||||
|
placeholder={field.placeholder || `Enter ${field.title || key}`}
|
||||||
|
min={field.minimum}
|
||||||
|
max={field.maximum}
|
||||||
|
/>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'integer':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id={key}
|
||||||
|
type="number"
|
||||||
|
step="1"
|
||||||
|
value={String(value)}
|
||||||
|
onChange={(e) => handleFieldChange(key, parseInt(e.target.value) || field.default || 0)}
|
||||||
|
placeholder={field.placeholder || `Enter ${field.title || key}`}
|
||||||
|
min={field.minimum}
|
||||||
|
max={field.maximum}
|
||||||
|
/>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'password':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id={key}
|
||||||
|
type="password"
|
||||||
|
value={String(value)}
|
||||||
|
onChange={(e) => handleFieldChange(key, e.target.value)}
|
||||||
|
placeholder={field.placeholder || `Enter ${field.title || key}`}
|
||||||
|
/>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'url':
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id={key}
|
||||||
|
type="url"
|
||||||
|
value={String(value)}
|
||||||
|
onChange={(e) => handleFieldChange(key, e.target.value)}
|
||||||
|
placeholder={field.placeholder || `Enter ${field.title || key}`}
|
||||||
|
pattern={field.pattern}
|
||||||
|
/>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
case 'object':
|
||||||
|
// Initialize object with defaults if not already set
|
||||||
|
if (!formValues[key] || typeof formValues[key] !== 'object') {
|
||||||
|
const defaultObj: Record<string, any> = {};
|
||||||
|
if (field.properties) {
|
||||||
|
Object.entries(field.properties).forEach(([k, f]: [string, any]) => {
|
||||||
|
defaultObj[k] = f.default;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
handleFieldChange(key, defaultObj);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-4 p-4 border rounded-lg bg-gray-50">
|
||||||
|
<div>
|
||||||
|
<Label className="text-sm font-semibold">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground mt-1">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<div className="space-y-3 ml-4">
|
||||||
|
{field.properties && Object.entries(field.properties).map(([nestedKey, nestedField]: [string, any]) => {
|
||||||
|
const nestedValue = (formValues[key] && typeof formValues[key] === 'object')
|
||||||
|
? formValues[key][nestedKey]
|
||||||
|
: nestedField.default;
|
||||||
|
|
||||||
|
const handleNestedChange = (nestedValue: any) => {
|
||||||
|
const currentObject = (formValues[key] && typeof formValues[key] === 'object') ? formValues[key] : {};
|
||||||
|
handleFieldChange(key, {
|
||||||
|
...currentObject,
|
||||||
|
[nestedKey]: nestedValue
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div key={`${key}.${nestedKey}`}>
|
||||||
|
{renderNestedField(`${key}.${nestedKey}`, nestedKey, nestedField, nestedValue, handleNestedChange)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
default: // string, email, etc.
|
||||||
|
return (
|
||||||
|
<div key={key} className="space-y-2">
|
||||||
|
<Label htmlFor={key} className="text-sm font-medium">
|
||||||
|
{field.title || field.label || key}
|
||||||
|
{field.required && <span className="text-red-500 ml-1">*</span>}
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id={key}
|
||||||
|
type={fieldType === 'email' ? 'email' : 'text'}
|
||||||
|
value={String(value)}
|
||||||
|
onChange={(e) => handleFieldChange(key, e.target.value)}
|
||||||
|
placeholder={field.placeholder || `Enter ${field.title || key}`}
|
||||||
|
pattern={field.pattern}
|
||||||
|
/>
|
||||||
|
{field.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{field.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const renderFieldGroup = (group: any, fields: Record<string, any>) => {
|
||||||
|
const groupFields = group.fields.map((fieldKey: string) => {
|
||||||
|
const field = fields[fieldKey];
|
||||||
|
if (!field) return null;
|
||||||
|
return renderField(fieldKey, field);
|
||||||
|
}).filter(Boolean);
|
||||||
|
|
||||||
|
if (groupFields.length === 0) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div key={group.title} className="space-y-4">
|
||||||
|
<h4 className="font-medium text-sm text-muted-foreground border-b pb-2">
|
||||||
|
{group.title}
|
||||||
|
</h4>
|
||||||
|
<div className="space-y-4 pl-4">
|
||||||
|
{groupFields}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const hasChanges = JSON.stringify(formValues) !== JSON.stringify(config);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||||
|
<DialogContent className="max-w-2xl max-h-[80vh] overflow-y-auto">
|
||||||
|
<DialogHeader>
|
||||||
|
<DialogTitle className="flex items-center gap-2">
|
||||||
|
<Settings className="h-5 w-5" />
|
||||||
|
Configure {plugin.name}
|
||||||
|
</DialogTitle>
|
||||||
|
<DialogDescription>
|
||||||
|
Configure the settings and behavior for the {plugin.name} plugin.
|
||||||
|
{plugin.version && ` (version ${plugin.version})`}
|
||||||
|
</DialogDescription>
|
||||||
|
</DialogHeader>
|
||||||
|
|
||||||
|
<div className="space-y-4">
|
||||||
|
{/* Success Message */}
|
||||||
|
{success && (
|
||||||
|
<Alert className="border-green-200 bg-green-50 text-green-800 dark:border-green-800 dark:bg-green-900/20 dark:text-green-200">
|
||||||
|
<CheckCircle className="h-4 w-4 text-green-600 dark:text-green-400" />
|
||||||
|
<AlertDescription className="font-medium">
|
||||||
|
✅ Configuration saved successfully! All settings have been saved and encrypted.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Error Message */}
|
||||||
|
{error && (
|
||||||
|
<Alert variant="destructive">
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>{error}</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Loading State */}
|
||||||
|
{loading ? (
|
||||||
|
<div className="flex items-center justify-center py-8">
|
||||||
|
<RotateCw className="h-6 w-6 animate-spin mr-2" />
|
||||||
|
Loading configuration...
|
||||||
|
</div>
|
||||||
|
) : schema?.properties ? (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Render grouped fields if field_groups are defined */}
|
||||||
|
{schema.field_groups ? (
|
||||||
|
schema.field_groups.map((group: any) =>
|
||||||
|
renderFieldGroup(group, schema.properties)
|
||||||
|
)
|
||||||
|
) : (
|
||||||
|
/* Custom rendering for Zammad plugin */
|
||||||
|
plugin.name?.toLowerCase() === 'zammad' ? (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Zammad Credentials Section with Test Button */}
|
||||||
|
<div className="space-y-4 p-4 border rounded-lg bg-blue-50 dark:bg-blue-950">
|
||||||
|
<div>
|
||||||
|
<h4 className="font-medium text-sm text-blue-800 dark:text-blue-200 border-b border-blue-200 dark:border-blue-800 pb-2">
|
||||||
|
Zammad Connection Settings
|
||||||
|
</h4>
|
||||||
|
</div>
|
||||||
|
<div className="space-y-4 ml-2">
|
||||||
|
{/* Zammad URL Field */}
|
||||||
|
{schema.properties.zammad_url && renderField('zammad_url', schema.properties.zammad_url)}
|
||||||
|
|
||||||
|
{/* API Token Field with Show/Hide Toggle */}
|
||||||
|
{schema.properties.api_token && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="api_token" className="text-sm font-medium">
|
||||||
|
{schema.properties.api_token.title || 'API Token'}
|
||||||
|
<span className="text-red-500 ml-1">*</span>
|
||||||
|
</Label>
|
||||||
|
<div className="relative">
|
||||||
|
<Input
|
||||||
|
id="api_token"
|
||||||
|
type={showApiToken ? "text" : "password"}
|
||||||
|
value={String(formValues.api_token || '')}
|
||||||
|
onChange={(e) => handleFieldChange('api_token', e.target.value)}
|
||||||
|
placeholder={config.api_token ? "••••••••••••••••••••••••••••••••••••••• (saved)" : "Enter API Token"}
|
||||||
|
className="pr-10"
|
||||||
|
/>
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
className="absolute right-0 top-0 h-full px-3 py-2 hover:bg-transparent"
|
||||||
|
onClick={() => setShowApiToken(!showApiToken)}
|
||||||
|
>
|
||||||
|
{showApiToken ? (
|
||||||
|
<EyeOff className="h-4 w-4" />
|
||||||
|
) : (
|
||||||
|
<Eye className="h-4 w-4" />
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
{schema.properties.api_token.description && (
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
{schema.properties.api_token.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
{config.api_token && (
|
||||||
|
<p className="text-xs text-blue-600 dark:text-blue-400">
|
||||||
|
💡 Leave empty to keep your existing saved token
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Test Credentials Button and Result */}
|
||||||
|
{formValues.zammad_url && formValues.api_token && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Button
|
||||||
|
onClick={handleTestCredentials}
|
||||||
|
variant="outline"
|
||||||
|
disabled={testingCredentials}
|
||||||
|
className="flex items-center gap-2"
|
||||||
|
size="sm"
|
||||||
|
>
|
||||||
|
{testingCredentials ? (
|
||||||
|
<>
|
||||||
|
<RotateCw className="h-4 w-4 animate-spin" />
|
||||||
|
Testing Credentials...
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<CheckCircle className="h-4 w-4" />
|
||||||
|
Test Credentials
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
|
|
||||||
|
{/* Credentials test result */}
|
||||||
|
{credentialsTestResult && (
|
||||||
|
<Alert variant={credentialsTestResult.success ? "default" : "destructive"} className="mt-2">
|
||||||
|
{credentialsTestResult.success ? (
|
||||||
|
<CheckCircle className="h-4 w-4" />
|
||||||
|
) : (
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
)}
|
||||||
|
<AlertDescription>
|
||||||
|
{credentialsTestResult.message}
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Chatbot Selection */}
|
||||||
|
{schema.properties.chatbot_id && (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<h4 className="font-medium text-sm text-muted-foreground border-b pb-2">
|
||||||
|
AI Integration
|
||||||
|
</h4>
|
||||||
|
<div className="ml-2">
|
||||||
|
{renderField('chatbot_id', schema.properties.chatbot_id)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Render other object fields */}
|
||||||
|
{Object.entries(schema.properties).map(([key, field]: [string, any]) => {
|
||||||
|
if (['zammad_url', 'api_token', 'chatbot_id'].includes(key)) {
|
||||||
|
return null; // Already rendered above
|
||||||
|
}
|
||||||
|
return renderField(key, field);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
/* Fallback to rendering all fields without groups */
|
||||||
|
<div className="space-y-4">
|
||||||
|
{Object.entries(schema.properties).map(([key, field]: [string, any]) =>
|
||||||
|
renderField(key, field)
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Connection test button if validation is configured */}
|
||||||
|
{schema.validation?.connection_test && (
|
||||||
|
<div className="pt-4 border-t">
|
||||||
|
<Button
|
||||||
|
onClick={handleTestConnection}
|
||||||
|
variant="outline"
|
||||||
|
disabled={testingConnection || !formValues[schema.validation.connection_test.fields[0]] || !formValues[schema.validation.connection_test.fields[1]]}
|
||||||
|
className="flex items-center gap-2"
|
||||||
|
>
|
||||||
|
{testingConnection ? (
|
||||||
|
<>
|
||||||
|
<RotateCw className="h-4 w-4 animate-spin" />
|
||||||
|
Testing Connection...
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<CheckCircle className="h-4 w-4" />
|
||||||
|
Test Connection
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<Alert>
|
||||||
|
<Info className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
This plugin does not have any configurable settings, or the configuration schema is not available.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<DialogFooter>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onOpenChange(false)}
|
||||||
|
disabled={saving}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
{schema?.properties && (
|
||||||
|
<Button
|
||||||
|
onClick={handleSave}
|
||||||
|
disabled={saving || !hasChanges}
|
||||||
|
className="flex items-center gap-2"
|
||||||
|
>
|
||||||
|
{saving ? (
|
||||||
|
<>
|
||||||
|
<RotateCw className="h-4 w-4 animate-spin" />
|
||||||
|
Saving...
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<Save className="h-4 w-4" />
|
||||||
|
Save Configuration
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</DialogFooter>
|
||||||
|
</DialogContent>
|
||||||
|
</Dialog>
|
||||||
|
);
|
||||||
|
};
|
||||||
448
frontend/src/components/plugins/PluginManager.tsx
Normal file
448
frontend/src/components/plugins/PluginManager.tsx
Normal file
@@ -0,0 +1,448 @@
|
|||||||
|
/**
|
||||||
|
* Plugin Manager - Main plugin management interface
|
||||||
|
*/
|
||||||
|
"use client"
|
||||||
|
|
||||||
|
import React, { useState, useEffect } from 'react';
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
import { Badge } from '@/components/ui/badge';
|
||||||
|
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
|
||||||
|
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
|
||||||
|
import { Input } from '@/components/ui/input';
|
||||||
|
import { Alert, AlertDescription } from '@/components/ui/alert';
|
||||||
|
import {
|
||||||
|
Search,
|
||||||
|
Filter,
|
||||||
|
Download,
|
||||||
|
Trash2,
|
||||||
|
Play,
|
||||||
|
Square,
|
||||||
|
Settings,
|
||||||
|
Info,
|
||||||
|
RotateCw,
|
||||||
|
Store,
|
||||||
|
LayoutDashboard,
|
||||||
|
CheckCircle,
|
||||||
|
XCircle,
|
||||||
|
Clock,
|
||||||
|
AlertCircle
|
||||||
|
} from 'lucide-react';
|
||||||
|
import { usePlugin, type PluginInfo, type AvailablePlugin } from '../../contexts/PluginContext';
|
||||||
|
import { useAuth } from '../../contexts/AuthContext';
|
||||||
|
import { PluginConfigurationDialog } from './PluginConfigurationDialog';
|
||||||
|
|
||||||
|
interface PluginCardProps {
|
||||||
|
plugin: PluginInfo;
|
||||||
|
onAction: (action: string, plugin: PluginInfo) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const InstalledPluginCard: React.FC<PluginCardProps> = ({ plugin, onAction }) => {
|
||||||
|
const getStatusBadge = (status: string) => {
|
||||||
|
const variants = {
|
||||||
|
'enabled': { variant: 'default' as const, icon: CheckCircle, text: 'Enabled' },
|
||||||
|
'disabled': { variant: 'secondary' as const, icon: XCircle, text: 'Disabled' },
|
||||||
|
'installed': { variant: 'outline' as const, icon: Clock, text: 'Installed' },
|
||||||
|
'uninstalled': { variant: 'destructive' as const, icon: AlertCircle, text: 'Uninstalled' }
|
||||||
|
};
|
||||||
|
|
||||||
|
const config = variants[status as keyof typeof variants] || variants.installed;
|
||||||
|
const IconComponent = config.icon;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Badge variant={config.variant} className="flex items-center gap-1">
|
||||||
|
<IconComponent className="h-3 w-3" />
|
||||||
|
{config.text}
|
||||||
|
</Badge>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div>
|
||||||
|
<CardTitle className="text-lg">{plugin.name}</CardTitle>
|
||||||
|
<CardDescription className="mt-1">
|
||||||
|
v{plugin.version} • {plugin.author}
|
||||||
|
</CardDescription>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
{getStatusBadge(plugin.status)}
|
||||||
|
{plugin.loaded && (
|
||||||
|
<Badge variant="outline" className="text-green-600">
|
||||||
|
Loaded
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<p className="text-sm text-muted-foreground mb-4">{plugin.description}</p>
|
||||||
|
|
||||||
|
<div className="flex flex-wrap gap-2">
|
||||||
|
{plugin.status === 'enabled' ? (
|
||||||
|
<>
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onAction('disable', plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Square className="h-4 w-4" />
|
||||||
|
Disable
|
||||||
|
</Button>
|
||||||
|
{!plugin.loaded ? (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
onClick={() => onAction('load', plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Play className="h-4 w-4" />
|
||||||
|
Load
|
||||||
|
</Button>
|
||||||
|
) : (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onAction('unload', plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Square className="h-4 w-4" />
|
||||||
|
Unload
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
onClick={() => onAction('enable', plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Play className="h-4 w-4" />
|
||||||
|
Enable
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onAction('configure', plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Settings className="h-4 w-4" />
|
||||||
|
Configure
|
||||||
|
</Button>
|
||||||
|
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="destructive"
|
||||||
|
onClick={() => onAction('uninstall', plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Trash2 className="h-4 w-4" />
|
||||||
|
Uninstall
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
interface AvailablePluginCardProps {
|
||||||
|
plugin: AvailablePlugin;
|
||||||
|
onInstall: (plugin: AvailablePlugin) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const AvailablePluginCard: React.FC<AvailablePluginCardProps> = ({ plugin, onInstall }) => {
|
||||||
|
return (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div>
|
||||||
|
<CardTitle className="text-lg">{plugin.name}</CardTitle>
|
||||||
|
<CardDescription className="mt-1">
|
||||||
|
v{plugin.version} • {plugin.author}
|
||||||
|
</CardDescription>
|
||||||
|
</div>
|
||||||
|
<Badge variant="outline">{plugin.category}</Badge>
|
||||||
|
</div>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<p className="text-sm text-muted-foreground mb-3">{plugin.description}</p>
|
||||||
|
|
||||||
|
<div className="flex flex-wrap gap-1 mb-4">
|
||||||
|
{plugin.tags.map((tag) => (
|
||||||
|
<Badge key={tag} variant="secondary" className="text-xs">
|
||||||
|
{tag}
|
||||||
|
</Badge>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div className="text-sm text-muted-foreground">
|
||||||
|
{plugin.local_status.installed ? (
|
||||||
|
<div className="flex items-center gap-1 text-green-600">
|
||||||
|
<CheckCircle className="h-4 w-4" />
|
||||||
|
Installed {plugin.local_status.version && `(v${plugin.local_status.version})`}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
'Not installed'
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{!plugin.local_status.installed ? (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
onClick={() => onInstall(plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<Download className="h-4 w-4" />
|
||||||
|
Install
|
||||||
|
</Button>
|
||||||
|
) : plugin.local_status.update_available ? (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onInstall(plugin)}
|
||||||
|
className="flex items-center gap-1"
|
||||||
|
>
|
||||||
|
<RotateCw className="h-4 w-4" />
|
||||||
|
Update
|
||||||
|
</Button>
|
||||||
|
) : null}
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const PluginManager: React.FC = () => {
|
||||||
|
const { user, token } = useAuth();
|
||||||
|
const {
|
||||||
|
installedPlugins,
|
||||||
|
availablePlugins,
|
||||||
|
loading,
|
||||||
|
error,
|
||||||
|
refreshInstalledPlugins,
|
||||||
|
searchAvailablePlugins,
|
||||||
|
installPlugin,
|
||||||
|
uninstallPlugin,
|
||||||
|
enablePlugin,
|
||||||
|
disablePlugin,
|
||||||
|
loadPlugin,
|
||||||
|
unloadPlugin,
|
||||||
|
} = usePlugin();
|
||||||
|
|
||||||
|
const [activeTab, setActiveTab] = useState<string>('installed');
|
||||||
|
const [searchQuery, setSearchQuery] = useState<string>('');
|
||||||
|
const [selectedCategory, setSelectedCategory] = useState<string>('');
|
||||||
|
const [configuringPlugin, setConfiguringPlugin] = useState<PluginInfo | null>(null);
|
||||||
|
|
||||||
|
// Load initial data only when authenticated
|
||||||
|
useEffect(() => {
|
||||||
|
if (user && token) {
|
||||||
|
refreshInstalledPlugins();
|
||||||
|
}
|
||||||
|
}, [user, token, refreshInstalledPlugins]);
|
||||||
|
|
||||||
|
// Load available plugins when switching to discover tab and authenticated
|
||||||
|
useEffect(() => {
|
||||||
|
if (activeTab === 'discover' && user && token) {
|
||||||
|
searchAvailablePlugins();
|
||||||
|
}
|
||||||
|
}, [activeTab, user, token, searchAvailablePlugins]);
|
||||||
|
|
||||||
|
const handlePluginAction = async (action: string, plugin: PluginInfo) => {
|
||||||
|
try {
|
||||||
|
switch (action) {
|
||||||
|
case 'enable':
|
||||||
|
await enablePlugin(plugin.id);
|
||||||
|
break;
|
||||||
|
case 'disable':
|
||||||
|
await disablePlugin(plugin.id);
|
||||||
|
break;
|
||||||
|
case 'load':
|
||||||
|
await loadPlugin(plugin.id);
|
||||||
|
break;
|
||||||
|
case 'unload':
|
||||||
|
await unloadPlugin(plugin.id);
|
||||||
|
break;
|
||||||
|
case 'uninstall':
|
||||||
|
if (confirm(`Are you sure you want to uninstall ${plugin.name}?`)) {
|
||||||
|
await uninstallPlugin(plugin.id);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'configure':
|
||||||
|
setConfiguringPlugin(plugin);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Failed to ${action} plugin:`, err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleInstallPlugin = async (plugin: AvailablePlugin) => {
|
||||||
|
try {
|
||||||
|
await installPlugin(plugin.id, plugin.version);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to install plugin:', err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const filteredAvailablePlugins = availablePlugins.filter(plugin => {
|
||||||
|
const matchesSearch = searchQuery === '' ||
|
||||||
|
plugin.name.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||||
|
plugin.description.toLowerCase().includes(searchQuery.toLowerCase());
|
||||||
|
|
||||||
|
const matchesCategory = selectedCategory === '' || plugin.category === selectedCategory;
|
||||||
|
|
||||||
|
return matchesSearch && matchesCategory;
|
||||||
|
});
|
||||||
|
|
||||||
|
const categories = Array.from(new Set(availablePlugins.map(p => p.category)));
|
||||||
|
|
||||||
|
// Show authentication required message if not authenticated
|
||||||
|
if (!user || !token) {
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<Alert>
|
||||||
|
<Info className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Please <a href="/login" className="underline">log in</a> to access the plugin manager.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{error && (
|
||||||
|
<Alert variant="destructive">
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>{error}</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<Tabs value={activeTab} onValueChange={setActiveTab}>
|
||||||
|
<TabsList>
|
||||||
|
<TabsTrigger value="installed" className="flex items-center gap-2">
|
||||||
|
<LayoutDashboard className="h-4 w-4" />
|
||||||
|
Installed ({installedPlugins.length})
|
||||||
|
</TabsTrigger>
|
||||||
|
<TabsTrigger value="discover" className="flex items-center gap-2">
|
||||||
|
<Store className="h-4 w-4" />
|
||||||
|
Discover
|
||||||
|
</TabsTrigger>
|
||||||
|
</TabsList>
|
||||||
|
|
||||||
|
<TabsContent value="installed" className="space-y-4">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<h3 className="text-lg font-semibold">Installed Plugins</h3>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={refreshInstalledPlugins}
|
||||||
|
disabled={loading}
|
||||||
|
className="flex items-center gap-2"
|
||||||
|
>
|
||||||
|
<RotateCw className={`h-4 w-4 ${loading ? 'animate-spin' : ''}`} />
|
||||||
|
Refresh
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{loading ? (
|
||||||
|
<div className="flex items-center justify-center py-8">
|
||||||
|
<RotateCw className="h-6 w-6 animate-spin mr-2" />
|
||||||
|
Loading plugins...
|
||||||
|
</div>
|
||||||
|
) : installedPlugins.length === 0 ? (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="py-8 text-center">
|
||||||
|
<Store className="h-12 w-12 mx-auto mb-4 text-muted-foreground" />
|
||||||
|
<h3 className="text-lg font-semibold mb-2">No Plugins Installed</h3>
|
||||||
|
<p className="text-muted-foreground mb-4">
|
||||||
|
Get started by discovering and installing plugins from the marketplace.
|
||||||
|
</p>
|
||||||
|
<Button onClick={() => setActiveTab('discover')}>
|
||||||
|
Discover Plugins
|
||||||
|
</Button>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
) : (
|
||||||
|
<div className="grid gap-4 md:grid-cols-2 lg:grid-cols-3">
|
||||||
|
{installedPlugins.map((plugin) => (
|
||||||
|
<InstalledPluginCard
|
||||||
|
key={plugin.id}
|
||||||
|
plugin={plugin}
|
||||||
|
onAction={handlePluginAction}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</TabsContent>
|
||||||
|
|
||||||
|
<TabsContent value="discover" className="space-y-4">
|
||||||
|
<div className="flex items-center gap-4">
|
||||||
|
<div className="flex-1">
|
||||||
|
<Input
|
||||||
|
placeholder="Search plugins..."
|
||||||
|
value={searchQuery}
|
||||||
|
onChange={(e) => setSearchQuery(e.target.value)}
|
||||||
|
className="max-w-sm"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<select
|
||||||
|
value={selectedCategory}
|
||||||
|
onChange={(e) => setSelectedCategory(e.target.value)}
|
||||||
|
className="px-3 py-2 border rounded-md"
|
||||||
|
>
|
||||||
|
<option value="">All Categories</option>
|
||||||
|
{categories.map(category => (
|
||||||
|
<option key={category} value={category}>{category}</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{loading ? (
|
||||||
|
<div className="flex items-center justify-center py-8">
|
||||||
|
<RotateCw className="h-6 w-6 animate-spin mr-2" />
|
||||||
|
Loading available plugins...
|
||||||
|
</div>
|
||||||
|
) : filteredAvailablePlugins.length === 0 ? (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="py-8 text-center">
|
||||||
|
<Search className="h-12 w-12 mx-auto mb-4 text-muted-foreground" />
|
||||||
|
<h3 className="text-lg font-semibold mb-2">No Plugins Found</h3>
|
||||||
|
<p className="text-muted-foreground">
|
||||||
|
{searchQuery || selectedCategory
|
||||||
|
? 'Try adjusting your search criteria.'
|
||||||
|
: 'The plugin marketplace appears to be empty or unavailable.'
|
||||||
|
}
|
||||||
|
</p>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
) : (
|
||||||
|
<div className="grid gap-4 md:grid-cols-2 lg:grid-cols-3">
|
||||||
|
{filteredAvailablePlugins.map((plugin) => (
|
||||||
|
<AvailablePluginCard
|
||||||
|
key={plugin.id}
|
||||||
|
plugin={plugin}
|
||||||
|
onInstall={handleInstallPlugin}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</TabsContent>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
|
{/* Configuration Dialog */}
|
||||||
|
{configuringPlugin && (
|
||||||
|
<PluginConfigurationDialog
|
||||||
|
plugin={configuringPlugin}
|
||||||
|
open={!!configuringPlugin}
|
||||||
|
onOpenChange={(open) => !open && setConfiguringPlugin(null)}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
253
frontend/src/components/plugins/PluginNavigation.tsx
Normal file
253
frontend/src/components/plugins/PluginNavigation.tsx
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
/**
|
||||||
|
* Plugin Navigation - Utility components for plugin navigation integration
|
||||||
|
*/
|
||||||
|
"use client"
|
||||||
|
|
||||||
|
import React from 'react';
|
||||||
|
import Link from 'next/link';
|
||||||
|
import { Badge } from '@/components/ui/badge';
|
||||||
|
import { Button } from '@/components/ui/button';
|
||||||
|
import {
|
||||||
|
LayoutDashboard,
|
||||||
|
Settings,
|
||||||
|
BarChart3,
|
||||||
|
Ticket,
|
||||||
|
Users,
|
||||||
|
Bell,
|
||||||
|
Shield,
|
||||||
|
Puzzle
|
||||||
|
} from 'lucide-react';
|
||||||
|
import { usePlugin, type PluginInfo, type PluginPage } from '../../contexts/PluginContext';
|
||||||
|
|
||||||
|
interface PluginNavigationProps {
|
||||||
|
className?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const getIconForPage = (pageName: string, pluginId: string) => {
|
||||||
|
const iconMap = {
|
||||||
|
dashboard: LayoutDashboard,
|
||||||
|
settings: Settings,
|
||||||
|
analytics: BarChart3,
|
||||||
|
tickets: Ticket,
|
||||||
|
users: Users,
|
||||||
|
notifications: Bell,
|
||||||
|
security: Shield,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Try to match by page name first
|
||||||
|
for (const [key, Icon] of Object.entries(iconMap)) {
|
||||||
|
if (pageName.toLowerCase().includes(key)) {
|
||||||
|
return Icon;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to match by plugin type
|
||||||
|
if (pluginId.includes('zammad') || pluginId.includes('helpdesk')) {
|
||||||
|
return Ticket;
|
||||||
|
}
|
||||||
|
if (pluginId.includes('discord') || pluginId.includes('slack')) {
|
||||||
|
return Bell;
|
||||||
|
}
|
||||||
|
if (pluginId.includes('analytics') || pluginId.includes('reporting')) {
|
||||||
|
return BarChart3;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default plugin icon
|
||||||
|
return Puzzle;
|
||||||
|
};
|
||||||
|
|
||||||
|
const PluginNavItem: React.FC<{
|
||||||
|
plugin: PluginInfo;
|
||||||
|
pages: PluginPage[];
|
||||||
|
currentPath: string;
|
||||||
|
}> = ({ plugin, pages, currentPath }) => {
|
||||||
|
const getPluginStatusVariant = () => {
|
||||||
|
if (!plugin.loaded) return 'secondary' as const;
|
||||||
|
if (plugin.health?.status === 'healthy') return 'default' as const;
|
||||||
|
if (plugin.health?.status === 'warning') return 'outline' as const;
|
||||||
|
if (plugin.health?.status === 'error') return 'destructive' as const;
|
||||||
|
return 'default' as const;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (pages.length === 0) return null;
|
||||||
|
|
||||||
|
if (pages.length === 1) {
|
||||||
|
const page = pages[0];
|
||||||
|
const IconComponent = getIconForPage(page.name, plugin.id);
|
||||||
|
const isActive = currentPath.startsWith(`/plugins/${plugin.id}`);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Link href={`/plugins/${plugin.id}${page.path}`}>
|
||||||
|
<Button
|
||||||
|
variant={isActive ? "default" : "ghost"}
|
||||||
|
className="w-full justify-start"
|
||||||
|
>
|
||||||
|
<IconComponent className="mr-2 h-4 w-4" />
|
||||||
|
{plugin.name}
|
||||||
|
<Badge variant={getPluginStatusVariant()} className="ml-auto">
|
||||||
|
{plugin.loaded ? 'loaded' : plugin.status}
|
||||||
|
</Badge>
|
||||||
|
</Button>
|
||||||
|
</Link>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Multi-page plugin - show as expandable or use a default page
|
||||||
|
const mainPage = pages.find(p => p.path === '/' || p.name.toLowerCase().includes('dashboard')) || pages[0];
|
||||||
|
const IconComponent = getIconForPage(mainPage.name, plugin.id);
|
||||||
|
const isActive = currentPath.startsWith(`/plugins/${plugin.id}`);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-1">
|
||||||
|
<Link href={`/plugins/${plugin.id}${mainPage.path}`}>
|
||||||
|
<Button
|
||||||
|
variant={isActive ? "default" : "ghost"}
|
||||||
|
className="w-full justify-start"
|
||||||
|
>
|
||||||
|
<IconComponent className="mr-2 h-4 w-4" />
|
||||||
|
{plugin.name}
|
||||||
|
<Badge variant={getPluginStatusVariant()} className="ml-auto">
|
||||||
|
{plugin.loaded ? 'loaded' : plugin.status}
|
||||||
|
</Badge>
|
||||||
|
</Button>
|
||||||
|
</Link>
|
||||||
|
|
||||||
|
{/* Additional pages as sub-items */}
|
||||||
|
{pages.length > 1 && isActive && (
|
||||||
|
<div className="ml-4 space-y-1">
|
||||||
|
{pages.filter(p => p !== mainPage).map((page) => {
|
||||||
|
const PageIcon = getIconForPage(page.name, plugin.id);
|
||||||
|
const pageActive = currentPath === `/plugins/${plugin.id}${page.path}`;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Link key={page.path} href={`/plugins/${plugin.id}${page.path}`}>
|
||||||
|
<Button
|
||||||
|
variant={pageActive ? "default" : "ghost"}
|
||||||
|
size="sm"
|
||||||
|
className="w-full justify-start"
|
||||||
|
>
|
||||||
|
<PageIcon className="mr-2 h-3 w-3" />
|
||||||
|
{page.title || page.name}
|
||||||
|
</Button>
|
||||||
|
</Link>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const PluginNavigation: React.FC<PluginNavigationProps> = ({
|
||||||
|
className = ""
|
||||||
|
}) => {
|
||||||
|
const { installedPlugins, getPluginPages, isPluginPageAuthorized } = usePlugin();
|
||||||
|
|
||||||
|
// Filter to loaded plugins with accessible pages
|
||||||
|
const availablePlugins = installedPlugins.filter(plugin => {
|
||||||
|
if (plugin.status !== 'enabled' || !plugin.loaded) return false;
|
||||||
|
|
||||||
|
const pages = getPluginPages(plugin.id);
|
||||||
|
if (pages.length === 0) return false;
|
||||||
|
|
||||||
|
// Check if user has access to at least one page
|
||||||
|
return pages.some(page => isPluginPageAuthorized(plugin.id, page.path));
|
||||||
|
});
|
||||||
|
|
||||||
|
if (availablePlugins.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<nav className={`space-y-2 ${className}`}>
|
||||||
|
<div className="text-sm font-medium text-muted-foreground">
|
||||||
|
Plugin Pages
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="space-y-1">
|
||||||
|
{availablePlugins.map((plugin) => {
|
||||||
|
const pages = getPluginPages(plugin.id).filter(page =>
|
||||||
|
isPluginPageAuthorized(plugin.id, page.path)
|
||||||
|
);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<PluginNavItem
|
||||||
|
key={plugin.id}
|
||||||
|
plugin={plugin}
|
||||||
|
pages={pages}
|
||||||
|
currentPath={typeof window !== 'undefined' ? window.location.pathname : ''}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Simplified plugin quick access for header/toolbar
|
||||||
|
export const PluginQuickAccess: React.FC = () => {
|
||||||
|
const { installedPlugins, getPluginPages } = usePlugin();
|
||||||
|
|
||||||
|
// Get plugins with dashboard/main pages
|
||||||
|
const quickAccessPlugins = installedPlugins
|
||||||
|
.filter(plugin =>
|
||||||
|
plugin.status === 'enabled' &&
|
||||||
|
plugin.loaded
|
||||||
|
)
|
||||||
|
.map(plugin => {
|
||||||
|
const pages = getPluginPages(plugin.id);
|
||||||
|
const dashboardPage = pages.find(page =>
|
||||||
|
page.name.toLowerCase().includes('dashboard') ||
|
||||||
|
page.name.toLowerCase().includes('main') ||
|
||||||
|
page.path === '/'
|
||||||
|
);
|
||||||
|
|
||||||
|
return dashboardPage ? { plugin, page: dashboardPage } : null;
|
||||||
|
})
|
||||||
|
.filter(Boolean)
|
||||||
|
.slice(0, 5); // Limit to 5 quick access items
|
||||||
|
|
||||||
|
if (quickAccessPlugins.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
{quickAccessPlugins.map(({ plugin, page }) => {
|
||||||
|
const IconComponent = getIconForPage(page!.name, plugin!.id);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Link key={plugin!.id} href={`/plugins/${plugin!.id}${page!.path}`}>
|
||||||
|
<Button variant="outline" size="sm" className="flex items-center gap-1">
|
||||||
|
<IconComponent className="h-3 w-3" />
|
||||||
|
{plugin!.name}
|
||||||
|
</Button>
|
||||||
|
</Link>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Plugin status indicator for use in other components
|
||||||
|
export const PluginStatusIndicator: React.FC<{ plugin: PluginInfo }> = ({ plugin }) => {
|
||||||
|
const getStatusVariant = () => {
|
||||||
|
if (!plugin.loaded) return 'secondary' as const;
|
||||||
|
if (plugin.health?.status === 'healthy') return 'default' as const;
|
||||||
|
if (plugin.health?.status === 'warning') return 'outline' as const;
|
||||||
|
if (plugin.health?.status === 'error') return 'destructive' as const;
|
||||||
|
return 'default' as const;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusText = () => {
|
||||||
|
if (!plugin.loaded) return plugin.status;
|
||||||
|
if (plugin.health?.status) return plugin.health.status;
|
||||||
|
return 'loaded';
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Badge variant={getStatusVariant()}>
|
||||||
|
{getStatusText()}
|
||||||
|
</Badge>
|
||||||
|
);
|
||||||
|
};
|
||||||
303
frontend/src/components/plugins/PluginPageRenderer.tsx
Normal file
303
frontend/src/components/plugins/PluginPageRenderer.tsx
Normal file
@@ -0,0 +1,303 @@
|
|||||||
|
/**
|
||||||
|
* Plugin Page Renderer - Renders plugin pages with security isolation
|
||||||
|
*/
|
||||||
|
"use client"
|
||||||
|
|
||||||
|
import React, { useState, useEffect, useRef } from 'react';
|
||||||
|
import { Alert, AlertDescription } from '@/components/ui/alert';
|
||||||
|
import { Card, CardContent } from '@/components/ui/card';
|
||||||
|
import { Skeleton } from '@/components/ui/skeleton';
|
||||||
|
import { AlertCircle, Loader2 } from 'lucide-react';
|
||||||
|
import { useAuth } from '../../contexts/AuthContext';
|
||||||
|
import { usePlugin, type PluginInfo } from '../../contexts/PluginContext';
|
||||||
|
|
||||||
|
interface PluginPageRendererProps {
|
||||||
|
pluginId: string;
|
||||||
|
pagePath: string;
|
||||||
|
componentName?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PluginIframeProps {
|
||||||
|
pluginId: string;
|
||||||
|
pagePath: string;
|
||||||
|
token: string;
|
||||||
|
onLoad?: () => void;
|
||||||
|
onError?: (error: string) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const PluginIframe: React.FC<PluginIframeProps> = ({
|
||||||
|
pluginId,
|
||||||
|
pagePath,
|
||||||
|
token,
|
||||||
|
onLoad,
|
||||||
|
onError
|
||||||
|
}) => {
|
||||||
|
const iframeRef = useRef<HTMLIFrameElement>(null);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const iframe = iframeRef.current;
|
||||||
|
if (!iframe) return;
|
||||||
|
|
||||||
|
// Set up iframe communication
|
||||||
|
const handleMessage = (event: MessageEvent) => {
|
||||||
|
// Only accept messages from our iframe
|
||||||
|
if (event.source !== iframe.contentWindow) return;
|
||||||
|
|
||||||
|
// Validate origin - should be from our backend
|
||||||
|
const allowedOrigins = [
|
||||||
|
window.location.origin,
|
||||||
|
'http://localhost:58000',
|
||||||
|
process.env.NEXT_PUBLIC_API_URL
|
||||||
|
].filter(Boolean);
|
||||||
|
|
||||||
|
if (!allowedOrigins.some(origin => event.origin.startsWith(origin))) {
|
||||||
|
console.warn('Rejected message from unauthorized origin:', event.origin);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const message = event.data;
|
||||||
|
|
||||||
|
switch (message.type) {
|
||||||
|
case 'plugin_loaded':
|
||||||
|
setLoading(false);
|
||||||
|
onLoad?.();
|
||||||
|
break;
|
||||||
|
case 'plugin_error':
|
||||||
|
setLoading(false);
|
||||||
|
onError?.(message.error || 'Plugin failed to load');
|
||||||
|
break;
|
||||||
|
case 'plugin_resize':
|
||||||
|
if (message.height && iframe) {
|
||||||
|
iframe.style.height = `${message.height}px`;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'plugin_navigate':
|
||||||
|
// Handle navigation within plugin
|
||||||
|
if (message.path) {
|
||||||
|
// Update URL without reload
|
||||||
|
const newUrl = `/plugins/${pluginId}${message.path}`;
|
||||||
|
window.history.pushState(null, '', newUrl);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Error processing plugin message:', err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
window.addEventListener('message', handleMessage);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
window.removeEventListener('message', handleMessage);
|
||||||
|
};
|
||||||
|
}, [pluginId, onLoad, onError]);
|
||||||
|
|
||||||
|
const iframeUrl = `/api/v1/plugins/${pluginId}/ui${pagePath}?token=${encodeURIComponent(token)}`;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="relative w-full">
|
||||||
|
{loading && (
|
||||||
|
<div className="absolute inset-0 flex items-center justify-center bg-background/80 z-10">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Loader2 className="h-5 w-5 animate-spin" />
|
||||||
|
<span className="text-sm text-muted-foreground">Loading plugin...</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<iframe
|
||||||
|
ref={iframeRef}
|
||||||
|
src={iframeUrl}
|
||||||
|
title={`Plugin ${pluginId} - ${pagePath}`}
|
||||||
|
className="w-full border-0"
|
||||||
|
style={{
|
||||||
|
minHeight: '400px',
|
||||||
|
maxHeight: '100vh',
|
||||||
|
backgroundColor: 'transparent'
|
||||||
|
}}
|
||||||
|
sandbox="allow-scripts allow-same-origin allow-forms allow-popups allow-modals"
|
||||||
|
onLoad={() => {
|
||||||
|
// Iframe loaded, but plugin might still be initializing
|
||||||
|
// Wait for plugin_loaded message
|
||||||
|
}}
|
||||||
|
onError={() => {
|
||||||
|
setLoading(false);
|
||||||
|
onError?.('Failed to load plugin iframe');
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const PluginComponentRenderer: React.FC<{
|
||||||
|
plugin: PluginInfo;
|
||||||
|
componentName: string;
|
||||||
|
}> = ({ plugin, componentName }) => {
|
||||||
|
const { getPluginComponent } = usePlugin();
|
||||||
|
|
||||||
|
const PluginComponent = getPluginComponent(plugin.id, componentName);
|
||||||
|
|
||||||
|
if (!PluginComponent) {
|
||||||
|
return (
|
||||||
|
<Alert>
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Component '{componentName}' not found in plugin '{plugin.name}'
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="plugin-component-container">
|
||||||
|
<PluginComponent />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const PluginPageRenderer: React.FC<PluginPageRendererProps> = ({
|
||||||
|
pluginId,
|
||||||
|
pagePath,
|
||||||
|
componentName
|
||||||
|
}) => {
|
||||||
|
const { user, token } = useAuth();
|
||||||
|
const {
|
||||||
|
installedPlugins,
|
||||||
|
getPluginPages,
|
||||||
|
isPluginPageAuthorized,
|
||||||
|
loading: pluginsLoading
|
||||||
|
} = usePlugin();
|
||||||
|
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Find the plugin
|
||||||
|
const plugin = installedPlugins.find(p => p.id === pluginId);
|
||||||
|
|
||||||
|
// Get plugin pages
|
||||||
|
const pluginPages = plugin ? getPluginPages(pluginId) : [];
|
||||||
|
const currentPage = pluginPages.find(p => p.path === pagePath);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!pluginsLoading) {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [pluginsLoading]);
|
||||||
|
|
||||||
|
// Loading state
|
||||||
|
if (loading || pluginsLoading) {
|
||||||
|
return (
|
||||||
|
<Card>
|
||||||
|
<CardContent className="p-6">
|
||||||
|
<div className="space-y-3">
|
||||||
|
<Skeleton className="h-6 w-3/4" />
|
||||||
|
<Skeleton className="h-4 w-1/2" />
|
||||||
|
<Skeleton className="h-32 w-full" />
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Plugin not found
|
||||||
|
if (!plugin) {
|
||||||
|
return (
|
||||||
|
<Alert variant="destructive">
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Plugin '{pluginId}' not found or not installed.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Plugin not enabled or loaded
|
||||||
|
if (plugin.status !== 'enabled' || !plugin.loaded) {
|
||||||
|
return (
|
||||||
|
<Alert>
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Plugin '{plugin.name}' is not enabled or loaded. Please enable and load the plugin first.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check authorization
|
||||||
|
if (!isPluginPageAuthorized(pluginId, pagePath)) {
|
||||||
|
return (
|
||||||
|
<Alert variant="destructive">
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
You are not authorized to view this plugin page.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Page not found
|
||||||
|
if (!currentPage && pluginPages.length > 0) {
|
||||||
|
return (
|
||||||
|
<Alert>
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Page '{pagePath}' not found in plugin '{plugin.name}'.
|
||||||
|
Available pages: {pluginPages.map(p => p.path).join(', ')}
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authentication required
|
||||||
|
if (!user || !token) {
|
||||||
|
return (
|
||||||
|
<Alert>
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Authentication required to view plugin pages.
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error state
|
||||||
|
if (error) {
|
||||||
|
return (
|
||||||
|
<Alert variant="destructive">
|
||||||
|
<AlertCircle className="h-4 w-4" />
|
||||||
|
<AlertDescription>
|
||||||
|
Error loading plugin page: {error}
|
||||||
|
</AlertDescription>
|
||||||
|
</Alert>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render component directly if componentName is provided
|
||||||
|
if (componentName) {
|
||||||
|
return <PluginComponentRenderer plugin={plugin} componentName={componentName} />;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Render plugin page in iframe (default)
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
{currentPage && (
|
||||||
|
<div>
|
||||||
|
<h1 className="text-2xl font-bold">{currentPage.title || currentPage.name}</h1>
|
||||||
|
<p className="text-muted-foreground">
|
||||||
|
{plugin.name} v{plugin.version}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<PluginIframe
|
||||||
|
pluginId={pluginId}
|
||||||
|
pagePath={pagePath}
|
||||||
|
token={token}
|
||||||
|
onLoad={() => setError(null)}
|
||||||
|
onError={setError}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
569
frontend/src/contexts/PluginContext.tsx
Normal file
569
frontend/src/contexts/PluginContext.tsx
Normal file
@@ -0,0 +1,569 @@
|
|||||||
|
"use client"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Plugin Context - Manages plugin state and UI integration
|
||||||
|
*/
|
||||||
|
import React, { createContext, useContext, useState, useEffect, useCallback, ReactNode } from 'react';
|
||||||
|
import { useAuth } from './AuthContext';
|
||||||
|
|
||||||
|
export interface PluginInfo {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
version: string;
|
||||||
|
description: string;
|
||||||
|
author: string;
|
||||||
|
status: 'enabled' | 'disabled' | 'installed' | 'uninstalled';
|
||||||
|
loaded: boolean;
|
||||||
|
manifest?: any;
|
||||||
|
health?: any;
|
||||||
|
resource_usage?: any;
|
||||||
|
pages?: PluginPage[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PluginPage {
|
||||||
|
name: string;
|
||||||
|
path: string;
|
||||||
|
component: string;
|
||||||
|
title?: string;
|
||||||
|
icon?: string;
|
||||||
|
requiresAuth?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PluginConfiguration {
|
||||||
|
plugin_id: string;
|
||||||
|
configuration: Record<string, any>;
|
||||||
|
schema?: any;
|
||||||
|
last_updated?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AvailablePlugin {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
version: string;
|
||||||
|
description: string;
|
||||||
|
author: string;
|
||||||
|
tags: string[];
|
||||||
|
category: string;
|
||||||
|
local_status: {
|
||||||
|
installed: boolean;
|
||||||
|
version?: string;
|
||||||
|
status?: string;
|
||||||
|
update_available?: boolean;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PluginContextType {
|
||||||
|
// State
|
||||||
|
installedPlugins: PluginInfo[];
|
||||||
|
availablePlugins: AvailablePlugin[];
|
||||||
|
pluginConfigurations: Record<string, PluginConfiguration>;
|
||||||
|
loading: boolean;
|
||||||
|
error: string | null;
|
||||||
|
|
||||||
|
// Actions
|
||||||
|
refreshInstalledPlugins: () => Promise<void>;
|
||||||
|
searchAvailablePlugins: (query?: string, tags?: string[], category?: string) => Promise<void>;
|
||||||
|
installPlugin: (pluginId: string, version: string) => Promise<boolean>;
|
||||||
|
uninstallPlugin: (pluginId: string, keepData?: boolean) => Promise<boolean>;
|
||||||
|
enablePlugin: (pluginId: string) => Promise<boolean>;
|
||||||
|
disablePlugin: (pluginId: string) => Promise<boolean>;
|
||||||
|
loadPlugin: (pluginId: string) => Promise<boolean>;
|
||||||
|
unloadPlugin: (pluginId: string) => Promise<boolean>;
|
||||||
|
|
||||||
|
// Configuration
|
||||||
|
getPluginConfiguration: (pluginId: string) => Promise<PluginConfiguration | null>;
|
||||||
|
savePluginConfiguration: (pluginId: string, config: Record<string, any>) => Promise<boolean>;
|
||||||
|
getPluginSchema: (pluginId: string) => Promise<any>;
|
||||||
|
|
||||||
|
// UI Integration
|
||||||
|
getPluginPages: (pluginId: string) => PluginPage[];
|
||||||
|
isPluginPageAuthorized: (pluginId: string, pagePath: string) => boolean;
|
||||||
|
getPluginComponent: (pluginId: string, componentName: string) => React.ComponentType | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const PluginContext = createContext<PluginContextType | undefined>(undefined);
|
||||||
|
|
||||||
|
export const usePlugin = () => {
|
||||||
|
const context = useContext(PluginContext);
|
||||||
|
if (context === undefined) {
|
||||||
|
throw new Error('usePlugin must be used within a PluginProvider');
|
||||||
|
}
|
||||||
|
return context;
|
||||||
|
};
|
||||||
|
|
||||||
|
interface PluginProviderProps {
|
||||||
|
children: ReactNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const PluginProvider: React.FC<PluginProviderProps> = ({ children }) => {
|
||||||
|
const { user, token } = useAuth();
|
||||||
|
const [installedPlugins, setInstalledPlugins] = useState<PluginInfo[]>([]);
|
||||||
|
const [availablePlugins, setAvailablePlugins] = useState<AvailablePlugin[]>([]);
|
||||||
|
const [pluginConfigurations, setPluginConfigurations] = useState<Record<string, PluginConfiguration>>({});
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Plugin component registry
|
||||||
|
const [pluginComponents, setPluginComponents] = useState<Record<string, Record<string, React.ComponentType>>>({});
|
||||||
|
|
||||||
|
const apiRequest = async (endpoint: string, options: RequestInit = {}) => {
|
||||||
|
if (!token) {
|
||||||
|
throw new Error('Authentication required');
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(`/api/v1/plugins${endpoint}`, {
|
||||||
|
...options,
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${token}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...options.headers,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json();
|
||||||
|
throw new Error(errorData.detail || `HTTP ${response.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response.json();
|
||||||
|
};
|
||||||
|
|
||||||
|
const refreshInstalledPlugins = useCallback(async () => {
|
||||||
|
if (!user || !token) {
|
||||||
|
setError('Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
const data = await apiRequest('/installed');
|
||||||
|
setInstalledPlugins(data.plugins);
|
||||||
|
|
||||||
|
// Load configurations for installed plugins
|
||||||
|
for (const plugin of data.plugins) {
|
||||||
|
try {
|
||||||
|
const config = await getPluginConfiguration(plugin.id);
|
||||||
|
if (config) {
|
||||||
|
setPluginConfigurations(prev => ({
|
||||||
|
...prev,
|
||||||
|
[plugin.id]: config
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.warn(`Failed to load config for plugin ${plugin.id}:`, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to load installed plugins');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [user, token]);
|
||||||
|
|
||||||
|
const searchAvailablePlugins = useCallback(async (query = '', tags: string[] = [], category = '') => {
|
||||||
|
if (!user || !token) {
|
||||||
|
setError('Authentication required');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
const params = new URLSearchParams();
|
||||||
|
if (query) params.append('query', query);
|
||||||
|
if (tags.length > 0) params.append('tags', tags.join(','));
|
||||||
|
if (category) params.append('category', category);
|
||||||
|
|
||||||
|
const data = await apiRequest(`/discover?${params.toString()}`);
|
||||||
|
setAvailablePlugins(data.plugins);
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to search plugins');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [user, token]);
|
||||||
|
|
||||||
|
const installPlugin = useCallback(async (pluginId: string, version: string): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
await apiRequest('/install', {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({
|
||||||
|
plugin_id: pluginId,
|
||||||
|
version: version,
|
||||||
|
source: 'repository'
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Refresh plugins after installation
|
||||||
|
await refreshInstalledPlugins();
|
||||||
|
await searchAvailablePlugins(); // Refresh to update local status
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Installation failed');
|
||||||
|
return false;
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
}, [refreshInstalledPlugins, searchAvailablePlugins]);
|
||||||
|
|
||||||
|
const uninstallPlugin = async (pluginId: string, keepData = true): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
await apiRequest(`/${pluginId}`, {
|
||||||
|
method: 'DELETE',
|
||||||
|
body: JSON.stringify({ keep_data: keepData }),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Remove from state
|
||||||
|
setInstalledPlugins(prev => prev.filter(p => p.id !== pluginId));
|
||||||
|
setPluginConfigurations(prev => {
|
||||||
|
const { [pluginId]: removed, ...rest } = prev;
|
||||||
|
return rest;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Unregister components
|
||||||
|
setPluginComponents(prev => {
|
||||||
|
const { [pluginId]: removed, ...rest } = prev;
|
||||||
|
return rest;
|
||||||
|
});
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Uninstallation failed');
|
||||||
|
return false;
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const enablePlugin = async (pluginId: string): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
await apiRequest(`/${pluginId}/enable`, { method: 'POST' });
|
||||||
|
|
||||||
|
// Update plugin status
|
||||||
|
setInstalledPlugins(prev =>
|
||||||
|
prev.map(p => p.id === pluginId ? { ...p, status: 'enabled' } : p)
|
||||||
|
);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Enable failed');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const disablePlugin = async (pluginId: string): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
await apiRequest(`/${pluginId}/disable`, { method: 'POST' });
|
||||||
|
|
||||||
|
// Update plugin status
|
||||||
|
setInstalledPlugins(prev =>
|
||||||
|
prev.map(p => p.id === pluginId ? { ...p, status: 'disabled', loaded: false } : p)
|
||||||
|
);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Disable failed');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const loadPlugin = async (pluginId: string): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
await apiRequest(`/${pluginId}/load`, { method: 'POST' });
|
||||||
|
|
||||||
|
// Update plugin status
|
||||||
|
setInstalledPlugins(prev =>
|
||||||
|
prev.map(p => p.id === pluginId ? { ...p, loaded: true } : p)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Load plugin UI components
|
||||||
|
await loadPluginComponents(pluginId);
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Load failed');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const unloadPlugin = async (pluginId: string): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
await apiRequest(`/${pluginId}/unload`, { method: 'POST' });
|
||||||
|
|
||||||
|
// Update plugin status
|
||||||
|
setInstalledPlugins(prev =>
|
||||||
|
prev.map(p => p.id === pluginId ? { ...p, loaded: false } : p)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Unregister components
|
||||||
|
setPluginComponents(prev => {
|
||||||
|
const { [pluginId]: removed, ...rest } = prev;
|
||||||
|
return rest;
|
||||||
|
});
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Unload failed');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getPluginConfiguration = async (pluginId: string): Promise<PluginConfiguration | null> => {
|
||||||
|
try {
|
||||||
|
const data = await apiRequest(`/${pluginId}/config`);
|
||||||
|
return data;
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Failed to get configuration for plugin ${pluginId}:`, err);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const savePluginConfiguration = async (pluginId: string, config: Record<string, any>): Promise<boolean> => {
|
||||||
|
try {
|
||||||
|
await apiRequest(`/${pluginId}/config`, {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({ configuration: config }),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update local state
|
||||||
|
setPluginConfigurations(prev => ({
|
||||||
|
...prev,
|
||||||
|
[pluginId]: {
|
||||||
|
plugin_id: pluginId,
|
||||||
|
configuration: config,
|
||||||
|
last_updated: new Date().toISOString()
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
setError(err instanceof Error ? err.message : 'Failed to save configuration');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getPluginSchema = async (pluginId: string): Promise<any> => {
|
||||||
|
try {
|
||||||
|
// Add cache-busting timestamp to force fresh schema fetch
|
||||||
|
const cacheBust = Date.now();
|
||||||
|
const data = await apiRequest(`/${pluginId}/schema?t=${cacheBust}`);
|
||||||
|
let schema = data.schema;
|
||||||
|
|
||||||
|
// For certain plugins, we need to populate dynamic options
|
||||||
|
// Find the plugin by ID to get its name
|
||||||
|
const plugin = installedPlugins.find(p => p.id === pluginId);
|
||||||
|
const pluginName = plugin?.name?.toLowerCase();
|
||||||
|
|
||||||
|
if (schema && pluginName === 'zammad') {
|
||||||
|
// Populate chatbot options for Zammad
|
||||||
|
try {
|
||||||
|
const chatbotsResponse = await fetch('/api/v1/chatbot/list', {
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${token}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (chatbotsResponse.ok) {
|
||||||
|
const chatbotsData = await chatbotsResponse.json();
|
||||||
|
const chatbots = chatbotsData.chatbots || [];
|
||||||
|
|
||||||
|
if (schema.properties?.chatbot_id) {
|
||||||
|
schema.properties.chatbot_id.type = 'select';
|
||||||
|
schema.properties.chatbot_id.options = chatbots.map((chatbot: any) => ({
|
||||||
|
value: chatbot.id,
|
||||||
|
label: `${chatbot.name} (${chatbot.chatbot_type})`
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (chatbotError) {
|
||||||
|
console.warn('Failed to load chatbots for Zammad configuration:', chatbotError);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate model options for AI settings
|
||||||
|
try {
|
||||||
|
const modelsResponse = await fetch('/api/v1/llm/models', {
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${token}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (modelsResponse.ok) {
|
||||||
|
const modelsData = await modelsResponse.json();
|
||||||
|
const models = modelsData.data || [];
|
||||||
|
|
||||||
|
const modelOptions = models.map((model: any) => ({
|
||||||
|
value: model.id,
|
||||||
|
label: model.id
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Set model options for AI summarization
|
||||||
|
if (schema.properties?.ai_summarization?.properties?.model) {
|
||||||
|
schema.properties.ai_summarization.properties.model.type = 'select';
|
||||||
|
schema.properties.ai_summarization.properties.model.options = modelOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set model options for draft settings
|
||||||
|
if (schema.properties?.draft_settings?.properties?.model) {
|
||||||
|
schema.properties.draft_settings.properties.model.type = 'select';
|
||||||
|
schema.properties.draft_settings.properties.model.options = modelOptions;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (modelError) {
|
||||||
|
console.warn('Failed to load models for Zammad configuration:', modelError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (schema && pluginName === 'signal') {
|
||||||
|
// Populate model options for Signal bot
|
||||||
|
try {
|
||||||
|
const modelsResponse = await fetch('/api/v1/llm/models', {
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${token}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (modelsResponse.ok) {
|
||||||
|
const modelsData = await modelsResponse.json();
|
||||||
|
const models = modelsData.models || [];
|
||||||
|
|
||||||
|
if (schema.properties?.model) {
|
||||||
|
schema.properties.model.options = models.map((model: any) => ({
|
||||||
|
value: model.id,
|
||||||
|
label: model.name || model.id
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (modelError) {
|
||||||
|
console.warn('Failed to load models for Signal configuration:', modelError);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return schema;
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Failed to get schema for plugin ${pluginId}:`, err);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const loadPluginComponents = async (pluginId: string) => {
|
||||||
|
try {
|
||||||
|
// Load plugin UI components dynamically
|
||||||
|
// This would involve loading the plugin's built JavaScript bundle
|
||||||
|
// For now, we'll use a placeholder system
|
||||||
|
|
||||||
|
const plugin = installedPlugins.find(p => p.id === pluginId);
|
||||||
|
if (!plugin || !plugin.manifest) return;
|
||||||
|
|
||||||
|
const uiConfig = plugin.manifest.spec?.ui_config;
|
||||||
|
if (!uiConfig) return;
|
||||||
|
|
||||||
|
// Register placeholder components for now
|
||||||
|
const components: Record<string, React.ComponentType> = {};
|
||||||
|
|
||||||
|
if (uiConfig.pages) {
|
||||||
|
for (const page of uiConfig.pages) {
|
||||||
|
components[page.component] = createPluginComponent(pluginId, page.component);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
setPluginComponents(prev => ({
|
||||||
|
...prev,
|
||||||
|
[pluginId]: components
|
||||||
|
}));
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Failed to load components for plugin ${pluginId}:`, err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
  // Build a placeholder React component for a plugin page. Real plugin
  // bundles are not loaded yet (see loadPluginComponents); this stub only
  // renders the plugin/component identity so routing can be exercised.
  const createPluginComponent = (pluginId: string, componentName: string): React.ComponentType => {
    return () => (
      <div className="plugin-component-placeholder">
        <h3>Plugin Component: {componentName}</h3>
        <p>Plugin: {pluginId}</p>
        <p>This is a placeholder for the plugin component that would be loaded dynamically.</p>
      </div>
    );
  };
|
||||||
|
|
||||||
|
const getPluginPages = (pluginId: string): PluginPage[] => {
|
||||||
|
const plugin = installedPlugins.find(p => p.id === pluginId);
|
||||||
|
if (!plugin || !plugin.manifest) return [];
|
||||||
|
|
||||||
|
const uiConfig = plugin.manifest.spec?.ui_config;
|
||||||
|
return uiConfig?.pages || [];
|
||||||
|
};
|
||||||
|
|
||||||
|
  // Whether the current user may open a plugin page. Permission checks are
  // not implemented yet: a page is reachable iff its plugin is enabled and
  // loaded. NOTE: pagePath is currently unused.
  const isPluginPageAuthorized = (pluginId: string, pagePath: string): boolean => {
    // TODO: Implement authorization logic based on user permissions
    const plugin = installedPlugins.find(p => p.id === pluginId);
    return plugin?.status === 'enabled' && plugin?.loaded;
  };
|
||||||
|
|
||||||
|
const getPluginComponent = (pluginId: string, componentName: string): React.ComponentType | null => {
|
||||||
|
return pluginComponents[pluginId]?.[componentName] || null;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Load installed plugins on mount, but only when authenticated
|
||||||
|
  // Load installed plugins on mount, but only when authenticated.
  // On logout (user/token cleared) all plugin state is reset so stale data
  // from the previous session cannot leak into the next one.
  useEffect(() => {
    if (user && token) {
      refreshInstalledPlugins();
    } else {
      // Clear plugin data when not authenticated
      setInstalledPlugins([]);
      setAvailablePlugins([]);
      setPluginConfigurations({});
      setError(null);
    }
  }, [user, token, refreshInstalledPlugins]);
|
||||||
|
|
||||||
|
  // Context value exposed to consumers; groups the provider's state,
  // lifecycle actions, configuration helpers, and UI-integration lookups.
  const value: PluginContextType = {
    // State
    installedPlugins,
    availablePlugins,
    pluginConfigurations,
    loading,
    error,

    // Actions
    refreshInstalledPlugins,
    searchAvailablePlugins,
    installPlugin,
    uninstallPlugin,
    enablePlugin,
    disablePlugin,
    loadPlugin,
    unloadPlugin,

    // Configuration
    getPluginConfiguration,
    savePluginConfiguration,
    getPluginSchema,

    // UI Integration
    getPluginPages,
    isPluginPageAuthorized,
    getPluginComponent,
  };
|
||||||
|
|
||||||
|
return (
|
||||||
|
<PluginContext.Provider value={value}>
|
||||||
|
{children}
|
||||||
|
</PluginContext.Provider>
|
||||||
|
);
|
||||||
|
};
|
||||||
90
plugins/zammad/alembic.ini
Normal file
90
plugins/zammad/alembic.ini
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||||
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# path to migration scripts
|
||||||
|
script_location = migrations
|
||||||
|
|
||||||
|
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||||
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
|
# defaults to the current working directory.
|
||||||
|
prepend_sys_path = .
|
||||||
|
|
||||||
|
# timezone to use when rendering the date within the migration file
|
||||||
|
# as well as the filename.
|
||||||
|
# If specified, requires the python-dateutil library that can be installed by running "pip install alembic[tz]"
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the
|
||||||
|
# "slug" field
|
||||||
|
# truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version path separator; As mentioned above, this is the character used to split
|
||||||
|
# version_locations. The default within new alembic.ini files is "os", which uses
|
||||||
|
# os.pathsep. If this key is omitted entirely, it falls back to the legacy
|
||||||
|
# behavior of splitting on spaces and/or commas.
|
||||||
|
# Valid values for version_path_separator are:
|
||||||
|
#
|
||||||
|
# version_path_separator = :
|
||||||
|
# version_path_separator = ;
|
||||||
|
# version_path_separator = space
|
||||||
|
version_path_separator = os
|
||||||
|
|
||||||
|
# set to 'true' to search source files recursively
|
||||||
|
# in each "version_locations" directory
|
||||||
|
# new in Alembic version 1.10
|
||||||
|
# recursive_version_locations = false
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
||||||
719
plugins/zammad/main.py
Normal file
719
plugins/zammad/main.py
Normal file
@@ -0,0 +1,719 @@
|
|||||||
|
"""
|
||||||
|
Zammad Plugin Implementation
|
||||||
|
Provides integration between Enclava platform and Zammad helpdesk system
|
||||||
|
"""
|
||||||
|
from typing import Dict, Any, List, Optional
|
||||||
|
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
||||||
|
from pydantic import BaseModel
|
||||||
|
import aiohttp
|
||||||
|
import asyncio
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
from app.services.base_plugin import BasePlugin, PluginContext
|
||||||
|
from app.services.plugin_database import PluginDatabaseSession, plugin_db_manager
|
||||||
|
from app.services.plugin_security import plugin_security_policy_manager
|
||||||
|
from sqlalchemy import Column, String, DateTime, Text, Boolean, Integer, ForeignKey
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy.dialects.postgresql import UUID
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
|
class ZammadTicket(BaseModel):
    """Pydantic schema for a Zammad ticket as exchanged over the API.

    NOTE: later in this module a SQLAlchemy model of the same name is
    defined, rebinding ``ZammadTicket`` — code below that point resolves
    the name to the ORM class, not this schema.
    """
    id: str
    title: str
    body: str
    status: str
    priority: str
    customer_id: str
    group_id: str
    created_at: datetime
    updated_at: datetime
    # Populated once AI summarization has run; absent until then.
    ai_summary: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ZammadConfiguration(BaseModel):
    """Pydantic schema for a Zammad connection configuration.

    NOTE: later in this module a SQLAlchemy model of the same name is
    defined, rebinding ``ZammadConfiguration`` — code below that point
    resolves the name to the ORM class, not this schema.
    """
    name: str
    zammad_url: str
    # Plaintext token here; the ORM model stores it encrypted.
    api_token: str
    chatbot_id: str
    ai_summarization: Dict[str, Any]
    sync_settings: Dict[str, Any]
    webhook_settings: Dict[str, Any]
|
|
||||||
|
|
||||||
|
# Plugin database models
Base = declarative_base()


def _utcnow() -> datetime:
    """Return the current UTC time; used as a per-row column default.

    A callable is required here: the original code passed
    ``datetime.now(timezone.utc)`` directly, which is evaluated once at
    import time, stamping every row with the module-load time instead of
    the actual insert/update time.
    """
    return datetime.now(timezone.utc)


class ZammadConfiguration(Base):
    """ORM table for a user's Zammad connection settings.

    NOTE: this rebinds the Pydantic ``ZammadConfiguration`` defined above;
    from here on the name resolves to this ORM class.
    """
    __tablename__ = "zammad_configurations"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(String, nullable=False, index=True)
    name = Column(String(100), nullable=False)
    zammad_url = Column(String(500), nullable=False)
    # Token is stored encrypted; see plugin_token_manager for (de)encryption.
    api_token_encrypted = Column(Text, nullable=False)
    chatbot_id = Column(String(100), nullable=False)
    is_active = Column(Boolean, default=True)
    ai_summarization_enabled = Column(Boolean, default=True)
    auto_summarize = Column(Boolean, default=True)
    sync_enabled = Column(Boolean, default=True)
    sync_interval_hours = Column(Integer, default=2)
    created_at = Column(DateTime, default=_utcnow)
    updated_at = Column(DateTime, default=_utcnow, onupdate=_utcnow)


class ZammadTicket(Base):
    """ORM table caching tickets synced from Zammad.

    NOTE: this rebinds the Pydantic ``ZammadTicket`` defined above.
    """
    __tablename__ = "zammad_tickets"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    zammad_ticket_id = Column(String(50), nullable=False, index=True)
    configuration_id = Column(UUID(as_uuid=True), ForeignKey("zammad_configurations.id"))
    title = Column(String(500), nullable=False)
    body = Column(Text)
    status = Column(String(50))
    priority = Column(String(50))
    customer_id = Column(String(50))
    group_id = Column(String(50))
    ai_summary = Column(Text)
    last_synced = Column(DateTime, default=_utcnow)
    created_at = Column(DateTime, default=_utcnow)
    updated_at = Column(DateTime, default=_utcnow, onupdate=_utcnow)

    configuration = relationship("ZammadConfiguration", back_populates="tickets")


# Back-reference attached after both classes exist.
ZammadConfiguration.tickets = relationship("ZammadTicket", back_populates="configuration")
|
||||||
|
|
||||||
|
class ZammadPlugin(BasePlugin):
|
||||||
|
"""Zammad helpdesk integration plugin with full framework integration"""
|
||||||
|
|
||||||
|
    def __init__(self, manifest, plugin_token: str):
        """Initialize the Zammad plugin wrapper.

        Args:
            manifest: Parsed plugin manifest (forwarded to BasePlugin).
            plugin_token: Auth token issued to this plugin instance.
        """
        super().__init__(manifest, plugin_token)
        # Reserved for a persistent Zammad HTTP client; currently unused —
        # requests are made with ad-hoc aiohttp sessions instead.
        self.zammad_client = None
        # ORM models this plugin owns; tables are created in initialize().
        self.db_models = [ZammadConfiguration, ZammadTicket]
|
||||||
|
|
||||||
|
    async def initialize(self) -> bool:
        """Initialize Zammad plugin with database setup.

        Creates the plugin's tables, probes the platform API's /health
        endpoint, and loads this plugin's security policy.

        Returns:
            True on success; False on any failure (errors are logged,
            never raised to the caller).
        """
        try:
            self.logger.info("Initializing Zammad plugin")

            # Create database tables
            await self._create_database_tables()

            # Test platform API connectivity
            health = await self.api_client.get("/health")
            self.logger.info(f"Platform API health: {health.get('status')}")

            # Validate security policy
            policy = plugin_security_policy_manager.get_security_policy(self.plugin_id, None)
            self.logger.info(f"Security policy loaded: {policy.get('max_api_calls_per_minute')} calls/min")

            self.logger.info("Zammad plugin initialized successfully")
            return True

        except Exception as e:
            self.logger.error(f"Failed to initialize Zammad plugin: {e}")
            return False
|
||||||
|
|
||||||
|
    async def _create_database_tables(self):
        """Create this plugin's database tables.

        Uses the plugin-scoped async engine from plugin_db_manager. A
        missing engine is silently skipped; actual creation failures are
        logged and re-raised so initialize() reports them.
        """
        try:
            engine = await plugin_db_manager.get_plugin_engine(self.plugin_id)
            if engine:
                async with engine.begin() as conn:
                    await conn.run_sync(Base.metadata.create_all)
                self.logger.info("Database tables created successfully")
        except Exception as e:
            self.logger.error(f"Failed to create database tables: {e}")
            raise
|
||||||
|
|
||||||
|
async def cleanup(self) -> bool:
|
||||||
|
"""Cleanup plugin resources"""
|
||||||
|
try:
|
||||||
|
self.logger.info("Cleaning up Zammad plugin")
|
||||||
|
# Close any open connections
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error during cleanup: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_api_router(self) -> APIRouter:
|
||||||
|
"""Return FastAPI router for Zammad endpoints"""
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.get("/health")
|
||||||
|
async def health_check():
|
||||||
|
"""Plugin health check endpoint"""
|
||||||
|
return await self.health_check()
|
||||||
|
|
||||||
|
@router.get("/tickets")
|
||||||
|
async def get_tickets(context: PluginContext = Depends(self.get_auth_context)):
|
||||||
|
"""Get tickets from Zammad"""
|
||||||
|
try:
|
||||||
|
self._track_request()
|
||||||
|
|
||||||
|
config = await self.get_active_config(context.user_id)
|
||||||
|
if not config:
|
||||||
|
raise HTTPException(status_code=404, detail="No Zammad configuration found")
|
||||||
|
|
||||||
|
tickets = await self.fetch_tickets_from_zammad(config)
|
||||||
|
return {"tickets": tickets, "count": len(tickets)}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self._track_request(success=False)
|
||||||
|
self.logger.error(f"Error fetching tickets: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.get("/tickets/{ticket_id}")
|
||||||
|
async def get_ticket(ticket_id: str, context: PluginContext = Depends(self.get_auth_context)):
|
||||||
|
"""Get specific ticket from Zammad"""
|
||||||
|
try:
|
||||||
|
self._track_request()
|
||||||
|
|
||||||
|
config = await self.get_active_config(context.user_id)
|
||||||
|
if not config:
|
||||||
|
raise HTTPException(status_code=404, detail="No Zammad configuration found")
|
||||||
|
|
||||||
|
ticket = await self.fetch_ticket_from_zammad(config, ticket_id)
|
||||||
|
return {"ticket": ticket}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self._track_request(success=False)
|
||||||
|
self.logger.error(f"Error fetching ticket {ticket_id}: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.post("/tickets/{ticket_id}/summarize")
|
||||||
|
async def summarize_ticket(
|
||||||
|
ticket_id: str,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
context: PluginContext = Depends(self.get_auth_context)
|
||||||
|
):
|
||||||
|
"""Generate AI summary for ticket"""
|
||||||
|
try:
|
||||||
|
self._track_request()
|
||||||
|
|
||||||
|
config = await self.get_active_config(context.user_id)
|
||||||
|
if not config:
|
||||||
|
raise HTTPException(status_code=404, detail="No Zammad configuration found")
|
||||||
|
|
||||||
|
# Start summarization in background
|
||||||
|
background_tasks.add_task(
|
||||||
|
self.summarize_ticket_async,
|
||||||
|
config,
|
||||||
|
ticket_id,
|
||||||
|
context.user_id
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": "started",
|
||||||
|
"ticket_id": ticket_id,
|
||||||
|
"message": "AI summarization started in background"
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self._track_request(success=False)
|
||||||
|
self.logger.error(f"Error starting summarization for ticket {ticket_id}: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.post("/webhooks/ticket-created")
|
||||||
|
async def handle_ticket_webhook(webhook_data: Dict[str, Any]):
|
||||||
|
"""Handle Zammad webhook for new tickets"""
|
||||||
|
try:
|
||||||
|
ticket_id = webhook_data.get("ticket", {}).get("id")
|
||||||
|
if not ticket_id:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid webhook data")
|
||||||
|
|
||||||
|
self.logger.info(f"Received webhook for ticket: {ticket_id}")
|
||||||
|
|
||||||
|
# Process webhook asynchronously
|
||||||
|
asyncio.create_task(self.process_ticket_webhook(webhook_data))
|
||||||
|
|
||||||
|
return {"status": "processed", "ticket_id": ticket_id}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error processing webhook: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.get("/configurations")
|
||||||
|
async def get_configurations(context: PluginContext = Depends(self.get_auth_context)):
|
||||||
|
"""Get user's Zammad configurations"""
|
||||||
|
try:
|
||||||
|
configs = await self.get_user_configurations(context.user_id)
|
||||||
|
return {"configurations": configs}
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error fetching configurations: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.post("/configurations")
|
||||||
|
async def create_configuration(
|
||||||
|
config_data: Dict[str, Any],
|
||||||
|
context: PluginContext = Depends(self.get_auth_context)
|
||||||
|
):
|
||||||
|
"""Create new Zammad configuration"""
|
||||||
|
try:
|
||||||
|
# Validate configuration against schema
|
||||||
|
schema = await self.get_configuration_schema()
|
||||||
|
is_valid, errors = await self.config.validate_config(config_data, schema)
|
||||||
|
|
||||||
|
if not is_valid:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Invalid configuration: {errors}")
|
||||||
|
|
||||||
|
# Test connection before saving
|
||||||
|
connection_test = await self.test_zammad_connection(config_data)
|
||||||
|
if not connection_test["success"]:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Connection test failed: {connection_test['error']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Save configuration to plugin database
|
||||||
|
success = await self._save_configuration_to_db(config_data, context.user_id)
|
||||||
|
if not success:
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to save configuration")
|
||||||
|
|
||||||
|
return {"status": "created", "config": {"name": config_data.get("name"), "zammad_url": config_data.get("zammad_url")}}
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error creating configuration: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.get("/statistics")
|
||||||
|
async def get_statistics(context: PluginContext = Depends(self.get_auth_context)):
|
||||||
|
"""Get plugin usage statistics"""
|
||||||
|
try:
|
||||||
|
stats = await self._get_plugin_statistics(context.user_id)
|
||||||
|
return stats
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error getting statistics: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
@router.get("/tickets/sync")
|
||||||
|
async def sync_tickets_manual(context: PluginContext = Depends(self.get_auth_context)):
|
||||||
|
"""Manually trigger ticket sync"""
|
||||||
|
try:
|
||||||
|
result = await self._sync_user_tickets(context.user_id)
|
||||||
|
return {"status": "completed", "synced_count": result}
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error syncing tickets: {e}")
|
||||||
|
raise HTTPException(status_code=500, detail=str(e))
|
||||||
|
|
||||||
|
return router
|
||||||
|
|
||||||
|
# Plugin-specific methods
|
||||||
|
|
||||||
|
    async def get_active_config(self, user_id: str) -> Optional[Dict[str, Any]]:
        """Load the user's active Zammad configuration from the plugin DB.

        Decrypts the stored API token before returning, so the result is
        ready for direct use against the Zammad API.

        Returns:
            A config dict (id, name, zammad_url, api_token, chatbot_id,
            ai_summarization, sync_settings), or None when the user has no
            active configuration or on any error (errors are logged).
        """
        try:
            async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
                config = await db.query(ZammadConfiguration).filter(
                    ZammadConfiguration.user_id == user_id,
                    ZammadConfiguration.is_active == True
                ).first()

                if config:
                    # Decrypt API token (stored encrypted at rest).
                    from app.services.plugin_security import plugin_token_manager
                    api_token = plugin_token_manager.decrypt_plugin_secret(config.api_token_encrypted)

                    return {
                        "id": str(config.id),
                        "name": config.name,
                        "zammad_url": config.zammad_url,
                        "api_token": api_token,
                        "chatbot_id": config.chatbot_id,
                        "ai_summarization": {
                            "enabled": config.ai_summarization_enabled,
                            "auto_summarize": config.auto_summarize
                        },
                        "sync_settings": {
                            "enabled": config.sync_enabled,
                            "interval_hours": config.sync_interval_hours
                        }
                    }
                return None
        except Exception as e:
            self.logger.error(f"Failed to get active config: {e}")
            return None
|
||||||
|
|
||||||
|
async def get_user_configurations(self, user_id: str) -> List[Dict[str, Any]]:
|
||||||
|
"""Get all configurations for user from database"""
|
||||||
|
try:
|
||||||
|
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
||||||
|
configs = await db.query(ZammadConfiguration).filter(
|
||||||
|
ZammadConfiguration.user_id == user_id
|
||||||
|
).all()
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for config in configs:
|
||||||
|
result.append({
|
||||||
|
"id": str(config.id),
|
||||||
|
"name": config.name,
|
||||||
|
"zammad_url": config.zammad_url,
|
||||||
|
"chatbot_id": config.chatbot_id,
|
||||||
|
"is_active": config.is_active,
|
||||||
|
"created_at": config.created_at.isoformat(),
|
||||||
|
"updated_at": config.updated_at.isoformat()
|
||||||
|
})
|
||||||
|
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Failed to get user configurations: {e}")
|
||||||
|
return []
|
||||||
|
|
||||||
|
async def fetch_tickets_from_zammad(self, config: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||||
|
"""Fetch tickets from Zammad API"""
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
headers = {
|
||||||
|
"Authorization": f"Token {config['api_token']}",
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
}
|
||||||
|
|
||||||
|
async with session.get(
|
||||||
|
f"{config['zammad_url']}/api/v1/tickets",
|
||||||
|
headers=headers,
|
||||||
|
timeout=30
|
||||||
|
) as response:
|
||||||
|
if response.status != 200:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=response.status,
|
||||||
|
detail=f"Zammad API error: {await response.text()}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return await response.json()
|
||||||
|
|
||||||
|
async def fetch_ticket_from_zammad(self, config: Dict[str, Any], ticket_id: str) -> Dict[str, Any]:
|
||||||
|
"""Fetch specific ticket from Zammad"""
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
headers = {
|
||||||
|
"Authorization": f"Token {config['api_token']}",
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
}
|
||||||
|
|
||||||
|
async with session.get(
|
||||||
|
f"{config['zammad_url']}/api/v1/tickets/{ticket_id}",
|
||||||
|
headers=headers,
|
||||||
|
timeout=30
|
||||||
|
) as response:
|
||||||
|
if response.status != 200:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=response.status,
|
||||||
|
detail=f"Zammad API error: {await response.text()}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return await response.json()
|
||||||
|
|
||||||
|
    async def summarize_ticket_async(self, config: Dict[str, Any], ticket_id: str, user_id: str):
        """Asynchronously summarize a ticket using the platform chatbot.

        Runs as a background task: fetches the ticket, asks the configured
        chatbot for a summary, and writes the summary back to the ticket as
        a note. Failures are logged, never raised.
        NOTE(review): user_id is currently unused in this method.
        """
        try:
            # Get ticket details
            ticket = await self.fetch_ticket_from_zammad(config, ticket_id)

            # Use platform chatbot API for summarization
            chatbot_response = await self.api_client.call_chatbot_api(
                chatbot_id=config["chatbot_id"],
                message=f"Summarize this support ticket:\n\nTitle: {ticket.get('title', '')}\n\nContent: {ticket.get('body', '')}"
            )

            summary = chatbot_response.get("response", "")

            # TODO: Store summary in database
            self.logger.info(f"Generated summary for ticket {ticket_id}: {summary[:100]}...")

            # Update ticket in Zammad with summary
            await self.update_ticket_summary(config, ticket_id, summary)

        except Exception as e:
            self.logger.error(f"Error summarizing ticket {ticket_id}: {e}")
|
||||||
|
|
||||||
|
async def update_ticket_summary(self, config: Dict[str, Any], ticket_id: str, summary: str):
|
||||||
|
"""Update ticket with AI summary"""
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
headers = {
|
||||||
|
"Authorization": f"Token {config['api_token']}",
|
||||||
|
"Content-Type": "application/json"
|
||||||
|
}
|
||||||
|
|
||||||
|
update_data = {
|
||||||
|
"note": f"AI Summary: {summary}"
|
||||||
|
}
|
||||||
|
|
||||||
|
async with session.put(
|
||||||
|
f"{config['zammad_url']}/api/v1/tickets/{ticket_id}",
|
||||||
|
headers=headers,
|
||||||
|
json=update_data,
|
||||||
|
timeout=30
|
||||||
|
) as response:
|
||||||
|
if response.status not in [200, 201]:
|
||||||
|
self.logger.error(f"Failed to update ticket {ticket_id} with summary")
|
||||||
|
|
||||||
|
    async def test_zammad_connection(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """Probe the configured Zammad instance with the given credentials.

        Calls /api/v1/users/me as a cheap authenticated smoke test.

        Returns:
            {"success": True, "user": ..., "zammad_version": ...} on a 200
            reply, otherwise {"success": False, "error": ...}. Network and
            timeout errors are reported the same way, never raised.
        """
        try:
            async with aiohttp.ClientSession() as session:
                headers = {
                    "Authorization": f"Token {config['api_token']}",
                    "Content-Type": "application/json"
                }

                async with session.get(
                    f"{config['zammad_url']}/api/v1/users/me",
                    headers=headers,
                    timeout=10
                ) as response:
                    if response.status == 200:
                        user_data = await response.json()
                        return {
                            "success": True,
                            "user": user_data.get("login", "unknown"),
                            "zammad_version": response.headers.get("X-Zammad-Version", "unknown")
                        }
                    else:
                        return {
                            "success": False,
                            "error": f"HTTP {response.status}: {await response.text()}"
                        }

        except Exception as e:
            return {
                "success": False,
                "error": str(e)
            }
|
||||||
|
|
||||||
|
async def process_ticket_webhook(self, webhook_data: Dict[str, Any]):
|
||||||
|
"""Process ticket webhook asynchronously"""
|
||||||
|
try:
|
||||||
|
ticket_data = webhook_data.get("ticket", {})
|
||||||
|
ticket_id = ticket_data.get("id")
|
||||||
|
|
||||||
|
self.logger.info(f"Processing webhook for ticket {ticket_id}")
|
||||||
|
|
||||||
|
# TODO: Get configuration and auto-summarize if enabled
|
||||||
|
# This would require looking up the configuration associated with the webhook
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Error processing webhook: {e}")
|
||||||
|
|
||||||
|
# Cron job functions
|
||||||
|
|
||||||
|
async def sync_tickets_from_zammad(self) -> bool:
|
||||||
|
"""Sync tickets from Zammad (cron job)"""
|
||||||
|
try:
|
||||||
|
self.logger.info("Starting ticket sync from Zammad")
|
||||||
|
|
||||||
|
# TODO: Get all active configurations and sync tickets
|
||||||
|
# This would iterate through all user configurations
|
||||||
|
|
||||||
|
self.logger.info("Ticket sync completed successfully")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Ticket sync failed: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
    async def cleanup_old_summaries(self) -> bool:
        """Cron entry point: purge AI summaries past their retention period.

        Currently a logged stub; actual deletion is a TODO. Returns True on
        success, False on any logged failure.
        """
        try:
            self.logger.info("Starting cleanup of old summaries")

            # TODO: Clean up summaries older than retention period

            self.logger.info("Summary cleanup completed")
            return True

        except Exception as e:
            self.logger.error(f"Summary cleanup failed: {e}")
            return False
|
||||||
|
|
||||||
|
    async def check_zammad_connection(self) -> bool:
        """Cron entry point: verify connectivity to configured Zammad hosts.

        Currently a logged stub; per-instance probing is a TODO. Returns
        True on success, False on any logged failure.
        """
        try:
            # TODO: Test all configured Zammad instances
            self.logger.info("Zammad connectivity check completed")
            return True

        except Exception as e:
            self.logger.error(f"Connectivity check failed: {e}")
            return False
|
||||||
|
|
||||||
|
async def generate_weekly_reports(self) -> bool:
|
||||||
|
"""Generate weekly reports (cron job)"""
|
||||||
|
try:
|
||||||
|
self.logger.info("Generating weekly reports")
|
||||||
|
|
||||||
|
# TODO: Generate and send weekly ticket reports
|
||||||
|
|
||||||
|
self.logger.info("Weekly reports generated successfully")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.error(f"Report generation failed: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Enhanced database integration methods
|
||||||
|
|
||||||
|
    async def _save_configuration_to_db(self, config_data: Dict[str, Any], user_id: str) -> bool:
        """Persist a new Zammad configuration for the user.

        Encrypts the API token at rest, deactivates any currently-active
        configurations when the new one is marked active (so at most one is
        active per user), then inserts the new row.

        Returns:
            True on success; False on any failure (errors are logged).
        """
        try:
            from app.services.plugin_security import plugin_token_manager

            # Encrypt API token before it ever touches the database.
            encrypted_token = plugin_token_manager.encrypt_plugin_secret(config_data["api_token"])

            async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
                # Deactivate existing configurations if this is set as active
                if config_data.get("is_active", True):
                    await db.query(ZammadConfiguration).filter(
                        ZammadConfiguration.user_id == user_id,
                        ZammadConfiguration.is_active == True
                    ).update({"is_active": False})

                # Create new configuration
                config = ZammadConfiguration(
                    user_id=user_id,
                    name=config_data["name"],
                    zammad_url=config_data["zammad_url"],
                    api_token_encrypted=encrypted_token,
                    chatbot_id=config_data["chatbot_id"],
                    is_active=config_data.get("is_active", True),
                    ai_summarization_enabled=config_data.get("ai_summarization", {}).get("enabled", True),
                    auto_summarize=config_data.get("ai_summarization", {}).get("auto_summarize", True),
                    sync_enabled=config_data.get("sync_settings", {}).get("enabled", True),
                    sync_interval_hours=config_data.get("sync_settings", {}).get("interval_hours", 2)
                )

                db.add(config)
                await db.commit()

            self.logger.info(f"Saved Zammad configuration for user {user_id}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to save configuration: {e}")
            return False
|
||||||
|
|
||||||
|
async def _get_plugin_statistics(self, user_id: str) -> Dict[str, Any]:
    """Get plugin usage statistics"""
    try:
        async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
            # Number of saved Zammad configurations for this user.
            config_total = await db.query(ZammadConfiguration).filter(
                ZammadConfiguration.user_id == user_id
            ).count()

            # All mirrored tickets that belong to any of the user's configurations.
            ticket_total = await db.query(ZammadTicket).join(ZammadConfiguration).filter(
                ZammadConfiguration.user_id == user_id
            ).count()

            # Subset of those tickets that already carry an AI summary.
            summarized_total = await db.query(ZammadTicket).join(ZammadConfiguration).filter(
                ZammadConfiguration.user_id == user_id,
                ZammadTicket.ai_summary.isnot(None)
            ).count()

            # Activity window: tickets touched by sync within the last 7 days.
            from datetime import timedelta
            cutoff = datetime.now(timezone.utc) - timedelta(days=7)
            recent_total = await db.query(ZammadTicket).join(ZammadConfiguration).filter(
                ZammadConfiguration.user_id == user_id,
                ZammadTicket.last_synced >= cutoff
            ).count()

            return {
                "configurations": config_total,
                "total_tickets": ticket_total,
                "tickets_with_summaries": summarized_total,
                "recent_tickets": recent_total,
                # max(..., 1) avoids division by zero when no tickets exist.
                "summary_rate": round((summarized_total / max(ticket_total, 1)) * 100, 1),
                "last_sync": datetime.now(timezone.utc).isoformat(),
            }

    except Exception as e:
        self.logger.error(f"Failed to get statistics: {e}")
        # Zeroed fallback so callers always receive the same shape.
        return {
            "error": str(e),
            "configurations": 0,
            "total_tickets": 0,
            "tickets_with_summaries": 0,
            "recent_tickets": 0,
            "summary_rate": 0.0,
        }
|
async def _sync_user_tickets(self, user_id: str) -> int:
    """Sync tickets for a specific user"""
    try:
        config = await self.get_active_config(user_id)
        if not config:
            return 0

        # Pull the remote state first; only then touch the local mirror.
        remote_tickets = await self.fetch_tickets_from_zammad(config)
        created = 0

        async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
            config_row = await db.query(ZammadConfiguration).filter(
                ZammadConfiguration.id == config["id"]
            ).first()
            if not config_row:
                return 0

            for remote in remote_tickets:
                remote_id = str(remote["id"])

                # A ticket is identified by (zammad id, configuration) pair.
                row = await db.query(ZammadTicket).filter(
                    ZammadTicket.zammad_ticket_id == remote_id,
                    ZammadTicket.configuration_id == config_row.id
                ).first()

                if row:
                    # Known ticket: refresh mutable fields in place.
                    row.title = remote.get("title", "")
                    row.body = remote.get("body", "")
                    row.status = remote.get("state", "")
                    row.priority = remote.get("priority", "")
                    row.last_synced = datetime.now(timezone.utc)
                    row.updated_at = datetime.now(timezone.utc)
                else:
                    # New ticket: mirror it locally. Only new rows count as "synced".
                    db.add(ZammadTicket(
                        zammad_ticket_id=remote_id,
                        configuration_id=config_row.id,
                        title=remote.get("title", ""),
                        body=remote.get("body", ""),
                        status=remote.get("state", ""),
                        priority=remote.get("priority", ""),
                        customer_id=str(remote.get("customer_id", "")),
                        group_id=str(remote.get("group_id", "")),
                        last_synced=datetime.now(timezone.utc)
                    ))
                    created += 1

            await db.commit()
            self.logger.info(f"Synced {created} new tickets for user {user_id}")
            return created

    except Exception as e:
        self.logger.error(f"Failed to sync tickets for user {user_id}: {e}")
        return 0
|
async def _store_ticket_summary(self, ticket_id: str, summary: str, config_id: str):
    """Store AI-generated summary in database"""
    try:
        async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
            row = await db.query(ZammadTicket).filter(
                ZammadTicket.zammad_ticket_id == ticket_id,
                ZammadTicket.configuration_id == config_id
            ).first()

            # Silently skip when the ticket is not mirrored locally (yet).
            if row is None:
                return

            row.ai_summary = summary
            row.updated_at = datetime.now(timezone.utc)
            await db.commit()
            self.logger.info(f"Stored AI summary for ticket {ticket_id}")

    except Exception as e:
        self.logger.error(f"Failed to store summary for ticket {ticket_id}: {e}")
||||||
253
plugins/zammad/manifest.yaml
Normal file
253
plugins/zammad/manifest.yaml
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
apiVersion: "v1"
|
||||||
|
kind: "Plugin"
|
||||||
|
metadata:
|
||||||
|
name: "zammad"
|
||||||
|
version: "1.0.0"
|
||||||
|
description: "Zammad helpdesk integration with AI summarization and ticket management"
|
||||||
|
author: "Enclava Team"
|
||||||
|
license: "MIT"
|
||||||
|
homepage: "https://github.com/enclava/plugins/zammad"
|
||||||
|
repository: "https://github.com/enclava/plugins/zammad"
|
||||||
|
tags:
|
||||||
|
- "helpdesk"
|
||||||
|
- "ticket-management"
|
||||||
|
- "ai-summarization"
|
||||||
|
- "integration"
|
||||||
|
|
||||||
|
spec:
|
||||||
|
runtime:
|
||||||
|
python_version: "3.11"
|
||||||
|
dependencies:
|
||||||
|
- "aiohttp>=3.8.0"
|
||||||
|
- "pydantic>=2.0.0"
|
||||||
|
- "httpx>=0.24.0"
|
||||||
|
- "python-dateutil>=2.8.0"
|
||||||
|
environment_variables:
|
||||||
|
ZAMMAD_TIMEOUT: "30"
|
||||||
|
ZAMMAD_MAX_RETRIES: "3"
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
platform_apis:
|
||||||
|
- "chatbot:invoke"
|
||||||
|
- "rag:query"
|
||||||
|
- "llm:completion"
|
||||||
|
- "llm:embeddings"
|
||||||
|
plugin_scopes:
|
||||||
|
- "tickets:read"
|
||||||
|
- "tickets:write"
|
||||||
|
- "tickets:summarize"
|
||||||
|
- "webhooks:receive"
|
||||||
|
- "config:manage"
|
||||||
|
- "sync:execute"
|
||||||
|
external_domains:
|
||||||
|
- "*.zammad.com"
|
||||||
|
- "*.zammad.org"
|
||||||
|
- "api.zammad.org"
|
||||||
|
|
||||||
|
database:
|
||||||
|
schema: "plugin_zammad"
|
||||||
|
migrations_path: "./migrations"
|
||||||
|
auto_migrate: true
|
||||||
|
|
||||||
|
api_endpoints:
|
||||||
|
- path: "/tickets"
|
||||||
|
methods: ["GET", "POST"]
|
||||||
|
description: "List and create Zammad tickets"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/tickets/{ticket_id}"
|
||||||
|
methods: ["GET", "PUT", "DELETE"]
|
||||||
|
description: "Get, update, or delete specific ticket"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/tickets/{ticket_id}/summarize"
|
||||||
|
methods: ["POST"]
|
||||||
|
description: "Generate AI summary for ticket"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/tickets/{ticket_id}/articles"
|
||||||
|
methods: ["GET", "POST"]
|
||||||
|
description: "Get ticket articles or add new article"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/webhooks/ticket-created"
|
||||||
|
methods: ["POST"]
|
||||||
|
description: "Handle Zammad webhook for new tickets"
|
||||||
|
auth_required: false
|
||||||
|
|
||||||
|
- path: "/webhooks/ticket-updated"
|
||||||
|
methods: ["POST"]
|
||||||
|
description: "Handle Zammad webhook for updated tickets"
|
||||||
|
auth_required: false
|
||||||
|
|
||||||
|
- path: "/configurations"
|
||||||
|
methods: ["GET", "POST", "PUT", "DELETE"]
|
||||||
|
description: "Manage Zammad configurations"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/configurations/{config_id}/test"
|
||||||
|
methods: ["POST"]
|
||||||
|
description: "Test Zammad configuration connection"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/statistics"
|
||||||
|
methods: ["GET"]
|
||||||
|
description: "Get plugin usage statistics"
|
||||||
|
auth_required: true
|
||||||
|
|
||||||
|
- path: "/health"
|
||||||
|
methods: ["GET"]
|
||||||
|
description: "Plugin health check"
|
||||||
|
auth_required: false
|
||||||
|
|
||||||
|
cron_jobs:
|
||||||
|
- name: "sync_tickets"
|
||||||
|
schedule: "0 */2 * * *"
|
||||||
|
function: "sync_tickets_from_zammad"
|
||||||
|
description: "Sync tickets from Zammad every 2 hours"
|
||||||
|
enabled: true
|
||||||
|
timeout_seconds: 600
|
||||||
|
max_retries: 3
|
||||||
|
|
||||||
|
- name: "cleanup_summaries"
|
||||||
|
schedule: "0 3 * * 0"
|
||||||
|
function: "cleanup_old_summaries"
|
||||||
|
description: "Clean up old AI summaries weekly"
|
||||||
|
enabled: true
|
||||||
|
timeout_seconds: 300
|
||||||
|
max_retries: 1
|
||||||
|
|
||||||
|
- name: "health_check"
|
||||||
|
schedule: "*/15 * * * *"
|
||||||
|
function: "check_zammad_connection"
|
||||||
|
description: "Check Zammad API connectivity every 15 minutes"
|
||||||
|
enabled: true
|
||||||
|
timeout_seconds: 60
|
||||||
|
max_retries: 2
|
||||||
|
|
||||||
|
- name: "generate_reports"
|
||||||
|
schedule: "0 9 * * 1"
|
||||||
|
function: "generate_weekly_reports"
|
||||||
|
description: "Generate weekly ticket reports"
|
||||||
|
enabled: false
|
||||||
|
timeout_seconds: 900
|
||||||
|
max_retries: 2
|
||||||
|
|
||||||
|
ui_config:
|
||||||
|
configuration_schema: "./config_schema.json"
|
||||||
|
ui_components: "./ui/components"
|
||||||
|
pages:
|
||||||
|
- name: "dashboard"
|
||||||
|
path: "/plugins/zammad"
|
||||||
|
component: "ZammadDashboard"
|
||||||
|
|
||||||
|
- name: "settings"
|
||||||
|
path: "/plugins/zammad/settings"
|
||||||
|
component: "ZammadSettings"
|
||||||
|
|
||||||
|
- name: "tickets"
|
||||||
|
path: "/plugins/zammad/tickets"
|
||||||
|
component: "ZammadTicketList"
|
||||||
|
|
||||||
|
- name: "analytics"
|
||||||
|
path: "/plugins/zammad/analytics"
|
||||||
|
component: "ZammadAnalytics"
|
||||||
|
|
||||||
|
external_services:
|
||||||
|
allowed_domains:
|
||||||
|
- "*.zammad.com"
|
||||||
|
- "*.zammad.org"
|
||||||
|
- "api.zammad.org"
|
||||||
|
- "help.zammad.com"
|
||||||
|
|
||||||
|
webhooks:
|
||||||
|
- endpoint: "/webhooks/ticket-created"
|
||||||
|
security: "signature_validation"
|
||||||
|
|
||||||
|
- endpoint: "/webhooks/ticket-updated"
|
||||||
|
security: "signature_validation"
|
||||||
|
|
||||||
|
rate_limits:
|
||||||
|
"*.zammad.com": 100
|
||||||
|
"*.zammad.org": 100
|
||||||
|
"api.zammad.org": 200
|
||||||
|
|
||||||
|
config_schema:
|
||||||
|
type: "object"
|
||||||
|
required:
|
||||||
|
- "zammad_url"
|
||||||
|
- "api_token"
|
||||||
|
- "chatbot_id"
|
||||||
|
properties:
|
||||||
|
zammad_url:
|
||||||
|
type: "string"
|
||||||
|
format: "uri"
|
||||||
|
title: "Zammad URL"
|
||||||
|
description: "The base URL of your Zammad instance"
|
||||||
|
examples:
|
||||||
|
- "https://company.zammad.com"
|
||||||
|
- "https://support.example.com"
|
||||||
|
|
||||||
|
api_token:
|
||||||
|
type: "string"
|
||||||
|
title: "API Token"
|
||||||
|
description: "Zammad API token with ticket read/write permissions"
|
||||||
|
minLength: 20
|
||||||
|
format: "password"
|
||||||
|
|
||||||
|
chatbot_id:
|
||||||
|
type: "string"
|
||||||
|
title: "Chatbot ID"
|
||||||
|
description: "Platform chatbot ID for AI summarization"
|
||||||
|
examples:
|
||||||
|
- "zammad-summarizer"
|
||||||
|
- "ticket-assistant"
|
||||||
|
|
||||||
|
ai_summarization:
|
||||||
|
type: "object"
|
||||||
|
title: "AI Summarization Settings"
|
||||||
|
properties:
|
||||||
|
enabled:
|
||||||
|
type: "boolean"
|
||||||
|
title: "Enable AI Summarization"
|
||||||
|
description: "Automatically summarize tickets using AI"
|
||||||
|
default: true
|
||||||
|
|
||||||
|
model:
|
||||||
|
type: "string"
|
||||||
|
title: "AI Model"
|
||||||
|
description: "LLM model to use for summarization"
|
||||||
|
default: "gpt-3.5-turbo"
|
||||||
|
|
||||||
|
max_tokens:
|
||||||
|
type: "integer"
|
||||||
|
title: "Max Summary Tokens"
|
||||||
|
description: "Maximum tokens for AI summary"
|
||||||
|
minimum: 50
|
||||||
|
maximum: 500
|
||||||
|
default: 150
|
||||||
|
|
||||||
|
draft_settings:
|
||||||
|
type: "object"
|
||||||
|
title: "AI Draft Settings"
|
||||||
|
properties:
|
||||||
|
enabled:
|
||||||
|
type: "boolean"
|
||||||
|
title: "Enable AI Drafts"
|
||||||
|
description: "Generate AI draft responses for tickets"
|
||||||
|
default: false
|
||||||
|
|
||||||
|
model:
|
||||||
|
type: "string"
|
||||||
|
title: "Draft Model"
|
||||||
|
description: "LLM model to use for draft generation"
|
||||||
|
default: "gpt-3.5-turbo"
|
||||||
|
|
||||||
|
max_tokens:
|
||||||
|
type: "integer"
|
||||||
|
title: "Max Draft Tokens"
|
||||||
|
description: "Maximum tokens for AI draft responses"
|
||||||
|
minimum: 100
|
||||||
|
maximum: 1000
|
||||||
|
default: 300
|
||||||
|
|
||||||
85
plugins/zammad/migrations/env.py
Normal file
85
plugins/zammad/migrations/env.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
"""Alembic environment for Zammad plugin"""
|
||||||
|
from logging.config import fileConfig
import os
import sys

from alembic import context
from sqlalchemy import engine_from_config, pool

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
from main import Base
target_metadata = Base.metadata


def _database_url() -> str:
    """Return the plugin database URL, failing fast when it is not configured.

    Without this guard a missing DATABASE_URL propagates ``None`` into
    SQLAlchemy and surfaces as a confusing error far from the real cause.
    """
    url = os.getenv("DATABASE_URL")
    if not url:
        raise RuntimeError(
            "DATABASE_URL environment variable must be set to run Zammad plugin migrations"
        )
    return url


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    context.configure(
        url=_database_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # get_section may return None when the section is absent; guard it so
    # we always hand engine_from_config a dict.
    configuration = config.get_section(config.config_ini_section) or {}
    configuration["sqlalchemy.url"] = _database_url()

    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
||||||
24
plugins/zammad/migrations/script.py.mako
Normal file
24
plugins/zammad/migrations/script.py.mako
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = ${repr(up_revision)}
|
||||||
|
down_revision = ${repr(down_revision)}
|
||||||
|
branch_labels = ${repr(branch_labels)}
|
||||||
|
depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
${downgrades if downgrades else "pass"}
|
||||||
112
plugins/zammad/migrations/versions/001_initial_zammad_schema.py
Normal file
112
plugins/zammad/migrations/versions/001_initial_zammad_schema.py
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
"""Initial Zammad plugin schema
|
||||||
|
|
||||||
|
Revision ID: 001
|
||||||
|
Revises:
|
||||||
|
Create Date: 2024-12-22 12:00:00.000000
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects.postgresql import UUID
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '001'
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Create initial Zammad plugin schema"""
|
||||||
|
|
||||||
|
# Create zammad_configurations table
|
||||||
|
op.create_table(
|
||||||
|
'zammad_configurations',
|
||||||
|
sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
|
||||||
|
sa.Column('user_id', sa.String(255), nullable=False),
|
||||||
|
sa.Column('name', sa.String(100), nullable=False),
|
||||||
|
sa.Column('zammad_url', sa.String(500), nullable=False),
|
||||||
|
sa.Column('api_token_encrypted', sa.Text(), nullable=False),
|
||||||
|
sa.Column('chatbot_id', sa.String(100), nullable=False),
|
||||||
|
sa.Column('is_active', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
||||||
|
sa.Column('ai_summarization_enabled', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
||||||
|
sa.Column('auto_summarize', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
||||||
|
sa.Column('sync_enabled', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
||||||
|
sa.Column('sync_interval_hours', sa.Integer(), nullable=False, server_default=sa.text('2')),
|
||||||
|
sa.Column('created_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
||||||
|
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create zammad_tickets table
|
||||||
|
op.create_table(
|
||||||
|
'zammad_tickets',
|
||||||
|
sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
|
||||||
|
sa.Column('zammad_ticket_id', sa.String(50), nullable=False),
|
||||||
|
sa.Column('configuration_id', UUID(as_uuid=True), nullable=True),
|
||||||
|
sa.Column('title', sa.String(500), nullable=False),
|
||||||
|
sa.Column('body', sa.Text(), nullable=True),
|
||||||
|
sa.Column('status', sa.String(50), nullable=True),
|
||||||
|
sa.Column('priority', sa.String(50), nullable=True),
|
||||||
|
sa.Column('customer_id', sa.String(50), nullable=True),
|
||||||
|
sa.Column('group_id', sa.String(50), nullable=True),
|
||||||
|
sa.Column('ai_summary', sa.Text(), nullable=True),
|
||||||
|
sa.Column('last_synced', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
||||||
|
sa.Column('created_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
||||||
|
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
||||||
|
sa.ForeignKeyConstraint(['configuration_id'], ['zammad_configurations.id'], ondelete='CASCADE'),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes for performance
|
||||||
|
op.create_index('idx_zammad_configurations_user_id', 'zammad_configurations', ['user_id'])
|
||||||
|
op.create_index('idx_zammad_configurations_user_active', 'zammad_configurations', ['user_id', 'is_active'])
|
||||||
|
|
||||||
|
op.create_index('idx_zammad_tickets_zammad_id', 'zammad_tickets', ['zammad_ticket_id'])
|
||||||
|
op.create_index('idx_zammad_tickets_config_id', 'zammad_tickets', ['configuration_id'])
|
||||||
|
op.create_index('idx_zammad_tickets_status', 'zammad_tickets', ['status'])
|
||||||
|
op.create_index('idx_zammad_tickets_last_synced', 'zammad_tickets', ['last_synced'])
|
||||||
|
|
||||||
|
# Create updated_at trigger function if it doesn't exist
|
||||||
|
op.execute("""
|
||||||
|
CREATE OR REPLACE FUNCTION update_updated_at_column()
|
||||||
|
RETURNS TRIGGER AS $$
|
||||||
|
BEGIN
|
||||||
|
NEW.updated_at = CURRENT_TIMESTAMP;
|
||||||
|
RETURN NEW;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE 'plpgsql';
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Create triggers to automatically update updated_at columns
|
||||||
|
op.execute("""
|
||||||
|
CREATE TRIGGER update_zammad_configurations_updated_at
|
||||||
|
BEFORE UPDATE ON zammad_configurations
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
""")
|
||||||
|
|
||||||
|
op.execute("""
|
||||||
|
CREATE TRIGGER update_zammad_tickets_updated_at
|
||||||
|
BEFORE UPDATE ON zammad_tickets
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Drop Zammad plugin schema"""
|
||||||
|
|
||||||
|
# Drop triggers first
|
||||||
|
op.execute("DROP TRIGGER IF EXISTS update_zammad_tickets_updated_at ON zammad_tickets;")
|
||||||
|
op.execute("DROP TRIGGER IF EXISTS update_zammad_configurations_updated_at ON zammad_configurations;")
|
||||||
|
|
||||||
|
# Drop indexes
|
||||||
|
op.drop_index('idx_zammad_tickets_last_synced')
|
||||||
|
op.drop_index('idx_zammad_tickets_status')
|
||||||
|
op.drop_index('idx_zammad_tickets_config_id')
|
||||||
|
op.drop_index('idx_zammad_tickets_zammad_id')
|
||||||
|
op.drop_index('idx_zammad_configurations_user_active')
|
||||||
|
op.drop_index('idx_zammad_configurations_user_id')
|
||||||
|
|
||||||
|
# Drop tables (tickets first due to foreign key)
|
||||||
|
op.drop_table('zammad_tickets')
|
||||||
|
op.drop_table('zammad_configurations')
|
||||||
|
|
||||||
|
# Note: We don't drop the update_updated_at_column function as it might be used by other tables
|
||||||
4
plugins/zammad/requirements.txt
Normal file
4
plugins/zammad/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
aiohttp>=3.8.0
|
||||||
|
pydantic>=2.0.0
|
||||||
|
httpx>=0.24.0
|
||||||
|
python-dateutil>=2.8.0
|
||||||
414
plugins/zammad/ui/components/ZammadDashboard.tsx
Normal file
414
plugins/zammad/ui/components/ZammadDashboard.tsx
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
/**
|
||||||
|
* Zammad Plugin Dashboard Component
|
||||||
|
* Main dashboard for Zammad plugin showing tickets, statistics, and quick actions
|
||||||
|
*/
|
||||||
|
import React, { useState, useEffect } from 'react';
|
||||||
|
import {
|
||||||
|
Box,
|
||||||
|
Grid,
|
||||||
|
Card,
|
||||||
|
CardContent,
|
||||||
|
Typography,
|
||||||
|
Button,
|
||||||
|
Chip,
|
||||||
|
Alert,
|
||||||
|
Table,
|
||||||
|
TableBody,
|
||||||
|
TableCell,
|
||||||
|
TableHead,
|
||||||
|
TableRow,
|
||||||
|
IconButton,
|
||||||
|
Dialog,
|
||||||
|
DialogTitle,
|
||||||
|
DialogContent,
|
||||||
|
DialogActions,
|
||||||
|
LinearProgress,
|
||||||
|
Tooltip
|
||||||
|
} from '@mui/material';
|
||||||
|
import {
|
||||||
|
Refresh as RefreshIcon,
|
||||||
|
Sync as SyncIcon,
|
||||||
|
Analytics as AnalyticsIcon,
|
||||||
|
Assignment as TicketIcon,
|
||||||
|
AutoAwesome as AIIcon,
|
||||||
|
Settings as SettingsIcon,
|
||||||
|
OpenInNew as OpenIcon
|
||||||
|
} from '@mui/icons-material';
|
||||||
|
|
||||||
|
interface ZammadTicket {
|
||||||
|
id: string;
|
||||||
|
title: string;
|
||||||
|
status: string;
|
||||||
|
priority: string;
|
||||||
|
customer_id: string;
|
||||||
|
created_at: string;
|
||||||
|
ai_summary?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ZammadStats {
|
||||||
|
configurations: number;
|
||||||
|
total_tickets: number;
|
||||||
|
tickets_with_summaries: number;
|
||||||
|
recent_tickets: number;
|
||||||
|
summary_rate: number;
|
||||||
|
last_sync: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const ZammadDashboard: React.FC = () => {
|
||||||
|
const [tickets, setTickets] = useState<ZammadTicket[]>([]);
|
||||||
|
const [stats, setStats] = useState<ZammadStats | null>(null);
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [selectedTicket, setSelectedTicket] = useState<ZammadTicket | null>(null);
|
||||||
|
const [dialogOpen, setDialogOpen] = useState(false);
|
||||||
|
const [syncing, setSyncing] = useState(false);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
loadDashboardData();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const loadDashboardData = async () => {
|
||||||
|
setLoading(true);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Load statistics
|
||||||
|
const statsResponse = await fetch('/api/v1/plugins/zammad/statistics');
|
||||||
|
if (statsResponse.ok) {
|
||||||
|
const statsData = await statsResponse.json();
|
||||||
|
setStats(statsData);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load recent tickets
|
||||||
|
const ticketsResponse = await fetch('/api/v1/plugins/zammad/tickets?limit=10');
|
||||||
|
if (ticketsResponse.ok) {
|
||||||
|
const ticketsData = await ticketsResponse.json();
|
||||||
|
setTickets(ticketsData.tickets || []);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (err) {
|
||||||
|
setError('Failed to load dashboard data');
|
||||||
|
console.error('Dashboard load error:', err);
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSyncTickets = async () => {
|
||||||
|
setSyncing(true);
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/v1/plugins/zammad/tickets/sync', {
|
||||||
|
method: 'GET'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
const result = await response.json();
|
||||||
|
// Reload dashboard data after sync
|
||||||
|
await loadDashboardData();
|
||||||
|
// Show success message with sync count
|
||||||
|
console.log(`Synced ${result.synced_count} tickets`);
|
||||||
|
} else {
|
||||||
|
throw new Error('Sync failed');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError('Failed to sync tickets');
|
||||||
|
} finally {
|
||||||
|
setSyncing(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleTicketClick = (ticket: ZammadTicket) => {
|
||||||
|
setSelectedTicket(ticket);
|
||||||
|
setDialogOpen(true);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSummarizeTicket = async (ticketId: string) => {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/v1/plugins/zammad/tickets/${ticketId}/summarize`, {
|
||||||
|
method: 'POST'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
// Show success message
|
||||||
|
console.log('Summarization started');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Summarization failed:', err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getStatusColor = (status: string) => {
|
||||||
|
switch (status.toLowerCase()) {
|
||||||
|
case 'open': return 'error';
|
||||||
|
case 'pending': return 'warning';
|
||||||
|
case 'closed': return 'success';
|
||||||
|
default: return 'default';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const getPriorityColor = (priority: string) => {
|
||||||
|
switch (priority) {
|
||||||
|
case '3 high': return 'error';
|
||||||
|
case '2 normal': return 'warning';
|
||||||
|
case '1 low': return 'success';
|
||||||
|
default: return 'default';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Box>
|
||||||
|
{/* Header */}
|
||||||
|
<Box display="flex" justifyContent="space-between" alignItems="center" mb={3}>
|
||||||
|
<Typography variant="h4" component="h1">
|
||||||
|
Zammad Dashboard
|
||||||
|
</Typography>
|
||||||
|
|
||||||
|
<Box display="flex" gap={2}>
|
||||||
|
<Button
|
||||||
|
variant="outlined"
|
||||||
|
startIcon={<SyncIcon />}
|
||||||
|
onClick={handleSyncTickets}
|
||||||
|
disabled={syncing}
|
||||||
|
>
|
||||||
|
{syncing ? 'Syncing...' : 'Sync Tickets'}
|
||||||
|
</Button>
|
||||||
|
|
||||||
|
<Button
|
||||||
|
variant="outlined"
|
||||||
|
startIcon={<RefreshIcon />}
|
||||||
|
onClick={loadDashboardData}
|
||||||
|
disabled={loading}
|
||||||
|
>
|
||||||
|
Refresh
|
||||||
|
</Button>
|
||||||
|
</Box>
|
||||||
|
</Box>
|
||||||
|
|
||||||
|
{error && (
|
||||||
|
<Alert severity="error" sx={{ mb: 3 }}>
|
||||||
|
{error}
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{loading && <LinearProgress sx={{ mb: 3 }} />}
|
||||||
|
|
||||||
|
{/* Statistics Cards */}
|
||||||
|
{stats && (
|
||||||
|
<Grid container spacing={3} sx={{ mb: 4 }}>
|
||||||
|
<Grid item xs={12} sm={6} md={3}>
|
||||||
|
<Card>
|
||||||
|
<CardContent>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<TicketIcon color="primary" />
|
||||||
|
<Box>
|
||||||
|
<Typography variant="h6">{stats.total_tickets}</Typography>
|
||||||
|
<Typography variant="body2" color="text.secondary">
|
||||||
|
Total Tickets
|
||||||
|
</Typography>
|
||||||
|
</Box>
|
||||||
|
</Box>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</Grid>
|
||||||
|
|
||||||
|
<Grid item xs={12} sm={6} md={3}>
|
||||||
|
<Card>
|
||||||
|
<CardContent>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<AIIcon color="secondary" />
|
||||||
|
<Box>
|
||||||
|
<Typography variant="h6">{stats.tickets_with_summaries}</Typography>
|
||||||
|
<Typography variant="body2" color="text.secondary">
|
||||||
|
AI Summaries
|
||||||
|
</Typography>
|
||||||
|
</Box>
|
||||||
|
</Box>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</Grid>
|
||||||
|
|
||||||
|
<Grid item xs={12} sm={6} md={3}>
|
||||||
|
<Card>
|
||||||
|
<CardContent>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<AnalyticsIcon color="success" />
|
||||||
|
<Box>
|
||||||
|
<Typography variant="h6">{stats.summary_rate}%</Typography>
|
||||||
|
<Typography variant="body2" color="text.secondary">
|
||||||
|
Summary Rate
|
||||||
|
</Typography>
|
||||||
|
</Box>
|
||||||
|
</Box>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</Grid>
|
||||||
|
|
||||||
|
<Grid item xs={12} sm={6} md={3}>
|
||||||
|
<Card>
|
||||||
|
<CardContent>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<RefreshIcon color="info" />
|
||||||
|
<Box>
|
||||||
|
<Typography variant="h6">{stats.recent_tickets}</Typography>
|
||||||
|
<Typography variant="body2" color="text.secondary">
|
||||||
|
Recent (7 days)
|
||||||
|
</Typography>
|
||||||
|
</Box>
|
||||||
|
</Box>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</Grid>
|
||||||
|
</Grid>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Recent Tickets Table */}
|
||||||
|
<Card>
|
||||||
|
<CardContent>
|
||||||
|
<Box display="flex" justifyContent="space-between" alignItems="center" mb={2}>
|
||||||
|
<Typography variant="h6">Recent Tickets</Typography>
|
||||||
|
<Button
|
||||||
|
size="small"
|
||||||
|
endIcon={<OpenIcon />}
|
||||||
|
onClick={() => window.location.hash = '#/plugins/zammad/tickets'}
|
||||||
|
>
|
||||||
|
View All
|
||||||
|
</Button>
|
||||||
|
</Box>
|
||||||
|
|
||||||
|
{tickets.length === 0 ? (
|
||||||
|
<Typography variant="body2" color="text.secondary" textAlign="center" py={4}>
|
||||||
|
No tickets found. Try syncing with Zammad.
|
||||||
|
</Typography>
|
||||||
|
) : (
|
||||||
|
<Table>
|
||||||
|
<TableHead>
|
||||||
|
<TableRow>
|
||||||
|
<TableCell>Title</TableCell>
|
||||||
|
<TableCell>Status</TableCell>
|
||||||
|
<TableCell>Priority</TableCell>
|
||||||
|
<TableCell>AI Summary</TableCell>
|
||||||
|
<TableCell>Actions</TableCell>
|
||||||
|
</TableRow>
|
||||||
|
</TableHead>
|
||||||
|
<TableBody>
|
||||||
|
{tickets.map((ticket) => (
|
||||||
|
<TableRow key={ticket.id} hover onClick={() => handleTicketClick(ticket)}>
|
||||||
|
<TableCell>
|
||||||
|
<Typography variant="body2" noWrap sx={{ maxWidth: 200 }}>
|
||||||
|
{ticket.title}
|
||||||
|
</Typography>
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
<Chip
|
||||||
|
label={ticket.status}
|
||||||
|
color={getStatusColor(ticket.status) as any}
|
||||||
|
size="small"
|
||||||
|
/>
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
<Chip
|
||||||
|
label={ticket.priority}
|
||||||
|
color={getPriorityColor(ticket.priority) as any}
|
||||||
|
size="small"
|
||||||
|
variant="outlined"
|
||||||
|
/>
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
{ticket.ai_summary ? (
|
||||||
|
<Chip label="Available" color="success" size="small" />
|
||||||
|
) : (
|
||||||
|
<Chip label="None" color="default" size="small" />
|
||||||
|
)}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
<Tooltip title="Generate AI Summary">
|
||||||
|
<IconButton
|
||||||
|
size="small"
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation();
|
||||||
|
handleSummarizeTicket(ticket.id);
|
||||||
|
}}
|
||||||
|
disabled={!!ticket.ai_summary}
|
||||||
|
>
|
||||||
|
<AIIcon />
|
||||||
|
</IconButton>
|
||||||
|
</Tooltip>
|
||||||
|
</TableCell>
|
||||||
|
</TableRow>
|
||||||
|
))}
|
||||||
|
</TableBody>
|
||||||
|
</Table>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{/* Ticket Detail Dialog */}
|
||||||
|
<Dialog
|
||||||
|
open={dialogOpen}
|
||||||
|
onClose={() => setDialogOpen(false)}
|
||||||
|
maxWidth="md"
|
||||||
|
fullWidth
|
||||||
|
>
|
||||||
|
<DialogTitle>
|
||||||
|
Ticket Details
|
||||||
|
</DialogTitle>
|
||||||
|
|
||||||
|
<DialogContent>
|
||||||
|
{selectedTicket && (
|
||||||
|
<Box>
|
||||||
|
<Typography variant="h6" gutterBottom>
|
||||||
|
{selectedTicket.title}
|
||||||
|
</Typography>
|
||||||
|
|
||||||
|
<Box display="flex" gap={2} mb={2}>
|
||||||
|
<Chip label={selectedTicket.status} color={getStatusColor(selectedTicket.status) as any} />
|
||||||
|
<Chip label={selectedTicket.priority} color={getPriorityColor(selectedTicket.priority) as any} />
|
||||||
|
</Box>
|
||||||
|
|
||||||
|
<Typography variant="body2" color="text.secondary" paragraph>
|
||||||
|
Customer: {selectedTicket.customer_id}
|
||||||
|
</Typography>
|
||||||
|
|
||||||
|
<Typography variant="body2" color="text.secondary" paragraph>
|
||||||
|
Created: {new Date(selectedTicket.created_at).toLocaleString()}
|
||||||
|
</Typography>
|
||||||
|
|
||||||
|
{selectedTicket.ai_summary && (
|
||||||
|
<Box mt={2}>
|
||||||
|
<Typography variant="subtitle2" gutterBottom>
|
||||||
|
AI Summary
|
||||||
|
</Typography>
|
||||||
|
<Typography variant="body2" sx={{
|
||||||
|
backgroundColor: 'grey.100',
|
||||||
|
p: 2,
|
||||||
|
borderRadius: 1
|
||||||
|
}}>
|
||||||
|
{selectedTicket.ai_summary}
|
||||||
|
</Typography>
|
||||||
|
</Box>
|
||||||
|
)}
|
||||||
|
</Box>
|
||||||
|
)}
|
||||||
|
</DialogContent>
|
||||||
|
|
||||||
|
<DialogActions>
|
||||||
|
<Button onClick={() => setDialogOpen(false)}>
|
||||||
|
Close
|
||||||
|
</Button>
|
||||||
|
{selectedTicket && !selectedTicket.ai_summary && (
|
||||||
|
<Button
|
||||||
|
variant="contained"
|
||||||
|
startIcon={<AIIcon />}
|
||||||
|
onClick={() => {
|
||||||
|
handleSummarizeTicket(selectedTicket.id);
|
||||||
|
setDialogOpen(false);
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Generate Summary
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</DialogActions>
|
||||||
|
</Dialog>
|
||||||
|
</Box>
|
||||||
|
);
|
||||||
|
};
|
||||||
512
plugins/zammad/ui/components/ZammadSettings.tsx
Normal file
512
plugins/zammad/ui/components/ZammadSettings.tsx
Normal file
@@ -0,0 +1,512 @@
|
|||||||
|
/**
|
||||||
|
* Zammad Plugin Settings Component
|
||||||
|
* Configuration interface for Zammad plugin
|
||||||
|
*/
|
||||||
|
import React, { useState, useEffect } from 'react';
|
||||||
|
import {
|
||||||
|
Box,
|
||||||
|
Card,
|
||||||
|
CardContent,
|
||||||
|
Typography,
|
||||||
|
TextField,
|
||||||
|
Button,
|
||||||
|
Switch,
|
||||||
|
FormControlLabel,
|
||||||
|
FormGroup,
|
||||||
|
Select,
|
||||||
|
MenuItem,
|
||||||
|
FormControl,
|
||||||
|
InputLabel,
|
||||||
|
Alert,
|
||||||
|
Divider,
|
||||||
|
Accordion,
|
||||||
|
AccordionSummary,
|
||||||
|
AccordionDetails,
|
||||||
|
Chip,
|
||||||
|
LinearProgress
|
||||||
|
} from '@mui/material';
|
||||||
|
import {
|
||||||
|
ExpandMore as ExpandMoreIcon,
|
||||||
|
Save as SaveIcon,
|
||||||
|
TestTube as TestIcon,
|
||||||
|
Security as SecurityIcon,
|
||||||
|
Sync as SyncIcon,
|
||||||
|
Smart as AIIcon
|
||||||
|
} from '@mui/icons-material';
|
||||||
|
|
||||||
|
/**
 * Shape of a Zammad plugin configuration as edited by this settings form
 * and POSTed to /api/v1/plugins/zammad/configurations.
 */
interface ZammadConfig {
  // Human-readable name for this configuration entry.
  name: string;
  // Base URL of the Zammad instance, e.g. "https://company.zammad.com".
  zammad_url: string;
  // Zammad API token; needs ticket read/write permissions (see helperText).
  api_token: string;
  // Platform chatbot ID used for AI summarization.
  chatbot_id: string;
  // AI ticket-summarization options.
  ai_summarization: {
    enabled: boolean;
    // Model identifier offered in the settings dropdown.
    model: string;
    // Upper bound on summary length, constrained to 50–500 by the form.
    max_tokens: number;
    // Whether new tickets are summarized automatically.
    auto_summarize: boolean;
  };
  // Periodic ticket-sync options.
  sync_settings: {
    enabled: boolean;
    // Sync period in hours, constrained to 1–24 by the form.
    interval_hours: number;
    // Whether ticket articles are synced along with tickets.
    sync_articles: boolean;
    // Batch cap per sync run, constrained to 10–1000 by the form.
    max_tickets_per_sync: number;
  };
  // Inbound webhook options.
  webhook_settings: {
    // Shared secret for webhook signature validation.
    secret: string;
    // Zammad event names that should trigger the webhook.
    enabled_events: string[];
  };
  // Outbound notification options.
  notification_settings: {
    email_notifications: boolean;
    // Optional Slack incoming-webhook URL.
    slack_webhook_url: string;
    // Event names that produce notifications.
    notification_events: string[];
  };
}
|
||||||
|
|
||||||
|
// Baseline configuration: used for a fresh (empty) form and spread as the
// fallback layer when a configuration loaded from the backend is merged in.
const defaultConfig: ZammadConfig = {
  name: '',
  zammad_url: '',
  api_token: '',
  chatbot_id: '',
  ai_summarization: {
    enabled: true,
    model: 'gpt-3.5-turbo',
    max_tokens: 150,
    auto_summarize: true
  },
  sync_settings: {
    enabled: true,
    // Sync every 2 hours by default.
    interval_hours: 2,
    sync_articles: true,
    max_tickets_per_sync: 100
  },
  webhook_settings: {
    secret: '',
    // Only create/update events are forwarded by default.
    enabled_events: ['ticket.create', 'ticket.update']
  },
  notification_settings: {
    email_notifications: false,
    slack_webhook_url: '',
    // Notify on failures only by default.
    notification_events: ['sync_error', 'api_error']
  }
};
|
||||||
|
|
||||||
|
export const ZammadSettings: React.FC = () => {
|
||||||
|
const [config, setConfig] = useState<ZammadConfig>(defaultConfig);
|
||||||
|
const [loading, setLoading] = useState(false);
|
||||||
|
const [saving, setSaving] = useState(false);
|
||||||
|
const [testing, setTesting] = useState(false);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
const [success, setSuccess] = useState<string | null>(null);
|
||||||
|
const [testResult, setTestResult] = useState<any>(null);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
loadConfiguration();
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const loadConfiguration = async () => {
|
||||||
|
setLoading(true);
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/v1/plugins/zammad/configurations');
|
||||||
|
if (response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
if (data.configurations.length > 0) {
|
||||||
|
// Load the first (active) configuration
|
||||||
|
const loadedConfig = data.configurations[0];
|
||||||
|
setConfig({
|
||||||
|
...defaultConfig,
|
||||||
|
...loadedConfig
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError('Failed to load configuration');
|
||||||
|
} finally {
|
||||||
|
setLoading(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleConfigChange = (path: string, value: any) => {
|
||||||
|
setConfig(prev => {
|
||||||
|
const newConfig = { ...prev };
|
||||||
|
const keys = path.split('.');
|
||||||
|
let current: any = newConfig;
|
||||||
|
|
||||||
|
for (let i = 0; i < keys.length - 1; i++) {
|
||||||
|
current = current[keys[i]];
|
||||||
|
}
|
||||||
|
|
||||||
|
current[keys[keys.length - 1]] = value;
|
||||||
|
return newConfig;
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleTestConnection = async () => {
|
||||||
|
setTesting(true);
|
||||||
|
setTestResult(null);
|
||||||
|
setError(null);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/v1/plugins/zammad/configurations/test', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
zammad_url: config.zammad_url,
|
||||||
|
api_token: config.api_token
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
setTestResult(result);
|
||||||
|
|
||||||
|
if (!result.success) {
|
||||||
|
setError(`Connection test failed: ${result.error}`);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError('Connection test failed');
|
||||||
|
} finally {
|
||||||
|
setTesting(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleSaveConfiguration = async () => {
|
||||||
|
setSaving(true);
|
||||||
|
setError(null);
|
||||||
|
setSuccess(null);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/v1/plugins/zammad/configurations', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
body: JSON.stringify(config)
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
setSuccess('Configuration saved successfully');
|
||||||
|
} else {
|
||||||
|
const errorData = await response.json();
|
||||||
|
setError(errorData.detail || 'Failed to save configuration');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
setError('Failed to save configuration');
|
||||||
|
} finally {
|
||||||
|
setSaving(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleArrayToggle = (path: string, value: string) => {
|
||||||
|
const currentArray = path.split('.').reduce((obj, key) => obj[key], config) as string[];
|
||||||
|
const newArray = currentArray.includes(value)
|
||||||
|
? currentArray.filter(item => item !== value)
|
||||||
|
: [...currentArray, value];
|
||||||
|
handleConfigChange(path, newArray);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (loading) {
|
||||||
|
return (
|
||||||
|
<Box>
|
||||||
|
<Typography variant="h4" gutterBottom>Zammad Settings</Typography>
|
||||||
|
<LinearProgress />
|
||||||
|
</Box>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Box>
|
||||||
|
<Box display="flex" justifyContent="space-between" alignItems="center" mb={3}>
|
||||||
|
<Typography variant="h4" component="h1">
|
||||||
|
Zammad Settings
|
||||||
|
</Typography>
|
||||||
|
|
||||||
|
<Box display="flex" gap={2}>
|
||||||
|
<Button
|
||||||
|
variant="outlined"
|
||||||
|
startIcon={<TestIcon />}
|
||||||
|
onClick={handleTestConnection}
|
||||||
|
disabled={testing || !config.zammad_url || !config.api_token}
|
||||||
|
>
|
||||||
|
{testing ? 'Testing...' : 'Test Connection'}
|
||||||
|
</Button>
|
||||||
|
|
||||||
|
<Button
|
||||||
|
variant="contained"
|
||||||
|
startIcon={<SaveIcon />}
|
||||||
|
onClick={handleSaveConfiguration}
|
||||||
|
disabled={saving}
|
||||||
|
>
|
||||||
|
{saving ? 'Saving...' : 'Save Configuration'}
|
||||||
|
</Button>
|
||||||
|
</Box>
|
||||||
|
</Box>
|
||||||
|
|
||||||
|
{error && (
|
||||||
|
<Alert severity="error" sx={{ mb: 3 }}>
|
||||||
|
{error}
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{success && (
|
||||||
|
<Alert severity="success" sx={{ mb: 3 }}>
|
||||||
|
{success}
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{testResult && (
|
||||||
|
<Alert
|
||||||
|
severity={testResult.success ? 'success' : 'error'}
|
||||||
|
sx={{ mb: 3 }}
|
||||||
|
>
|
||||||
|
{testResult.success
|
||||||
|
? `Connection successful! User: ${testResult.user}, Version: ${testResult.zammad_version}`
|
||||||
|
: `Connection failed: ${testResult.error}`
|
||||||
|
}
|
||||||
|
</Alert>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Basic Configuration */}
|
||||||
|
<Card sx={{ mb: 3 }}>
|
||||||
|
<CardContent>
|
||||||
|
<Typography variant="h6" gutterBottom>
|
||||||
|
Basic Configuration
|
||||||
|
</Typography>
|
||||||
|
|
||||||
|
<Box display="flex" flexDirection="column" gap={3}>
|
||||||
|
<TextField
|
||||||
|
label="Configuration Name"
|
||||||
|
value={config.name}
|
||||||
|
onChange={(e) => handleConfigChange('name', e.target.value)}
|
||||||
|
fullWidth
|
||||||
|
required
|
||||||
|
/>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="Zammad URL"
|
||||||
|
value={config.zammad_url}
|
||||||
|
onChange={(e) => handleConfigChange('zammad_url', e.target.value)}
|
||||||
|
fullWidth
|
||||||
|
required
|
||||||
|
placeholder="https://company.zammad.com"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="API Token"
|
||||||
|
type="password"
|
||||||
|
value={config.api_token}
|
||||||
|
onChange={(e) => handleConfigChange('api_token', e.target.value)}
|
||||||
|
fullWidth
|
||||||
|
required
|
||||||
|
helperText="Zammad API token with ticket read/write permissions"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="Chatbot ID"
|
||||||
|
value={config.chatbot_id}
|
||||||
|
onChange={(e) => handleConfigChange('chatbot_id', e.target.value)}
|
||||||
|
fullWidth
|
||||||
|
required
|
||||||
|
helperText="Platform chatbot ID for AI summarization"
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{/* AI Summarization Settings */}
|
||||||
|
<Accordion sx={{ mb: 2 }}>
|
||||||
|
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<AIIcon />
|
||||||
|
<Typography variant="h6">AI Summarization</Typography>
|
||||||
|
<Chip
|
||||||
|
label={config.ai_summarization.enabled ? 'Enabled' : 'Disabled'}
|
||||||
|
color={config.ai_summarization.enabled ? 'success' : 'default'}
|
||||||
|
size="small"
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
</AccordionSummary>
|
||||||
|
<AccordionDetails>
|
||||||
|
<Box display="flex" flexDirection="column" gap={3}>
|
||||||
|
<FormControlLabel
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.ai_summarization.enabled}
|
||||||
|
onChange={(e) => handleConfigChange('ai_summarization.enabled', e.target.checked)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label="Enable AI Summarization"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<FormControl fullWidth>
|
||||||
|
<InputLabel>AI Model</InputLabel>
|
||||||
|
<Select
|
||||||
|
value={config.ai_summarization.model}
|
||||||
|
onChange={(e) => handleConfigChange('ai_summarization.model', e.target.value)}
|
||||||
|
label="AI Model"
|
||||||
|
>
|
||||||
|
<MenuItem value="gpt-3.5-turbo">GPT-3.5 Turbo</MenuItem>
|
||||||
|
<MenuItem value="gpt-4">GPT-4</MenuItem>
|
||||||
|
<MenuItem value="claude-3-sonnet">Claude 3 Sonnet</MenuItem>
|
||||||
|
</Select>
|
||||||
|
</FormControl>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="Max Summary Tokens"
|
||||||
|
type="number"
|
||||||
|
value={config.ai_summarization.max_tokens}
|
||||||
|
onChange={(e) => handleConfigChange('ai_summarization.max_tokens', parseInt(e.target.value))}
|
||||||
|
inputProps={{ min: 50, max: 500 }}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<FormControlLabel
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.ai_summarization.auto_summarize}
|
||||||
|
onChange={(e) => handleConfigChange('ai_summarization.auto_summarize', e.target.checked)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label="Auto-summarize New Tickets"
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
</AccordionDetails>
|
||||||
|
</Accordion>
|
||||||
|
|
||||||
|
{/* Sync Settings */}
|
||||||
|
<Accordion sx={{ mb: 2 }}>
|
||||||
|
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<SyncIcon />
|
||||||
|
<Typography variant="h6">Sync Settings</Typography>
|
||||||
|
<Chip
|
||||||
|
label={config.sync_settings.enabled ? 'Enabled' : 'Disabled'}
|
||||||
|
color={config.sync_settings.enabled ? 'success' : 'default'}
|
||||||
|
size="small"
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
</AccordionSummary>
|
||||||
|
<AccordionDetails>
|
||||||
|
<Box display="flex" flexDirection="column" gap={3}>
|
||||||
|
<FormControlLabel
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.sync_settings.enabled}
|
||||||
|
onChange={(e) => handleConfigChange('sync_settings.enabled', e.target.checked)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label="Enable Automatic Sync"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="Sync Interval (Hours)"
|
||||||
|
type="number"
|
||||||
|
value={config.sync_settings.interval_hours}
|
||||||
|
onChange={(e) => handleConfigChange('sync_settings.interval_hours', parseInt(e.target.value))}
|
||||||
|
inputProps={{ min: 1, max: 24 }}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<FormControlLabel
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.sync_settings.sync_articles}
|
||||||
|
onChange={(e) => handleConfigChange('sync_settings.sync_articles', e.target.checked)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label="Sync Ticket Articles"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="Max Tickets Per Sync"
|
||||||
|
type="number"
|
||||||
|
value={config.sync_settings.max_tickets_per_sync}
|
||||||
|
onChange={(e) => handleConfigChange('sync_settings.max_tickets_per_sync', parseInt(e.target.value))}
|
||||||
|
inputProps={{ min: 10, max: 1000 }}
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
</AccordionDetails>
|
||||||
|
</Accordion>
|
||||||
|
|
||||||
|
{/* Webhook Settings */}
|
||||||
|
<Accordion sx={{ mb: 2 }}>
|
||||||
|
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
|
||||||
|
<Box display="flex" alignItems="center" gap={2}>
|
||||||
|
<SecurityIcon />
|
||||||
|
<Typography variant="h6">Webhook Settings</Typography>
|
||||||
|
</Box>
|
||||||
|
</AccordionSummary>
|
||||||
|
<AccordionDetails>
|
||||||
|
<Box display="flex" flexDirection="column" gap={3}>
|
||||||
|
<TextField
|
||||||
|
label="Webhook Secret"
|
||||||
|
type="password"
|
||||||
|
value={config.webhook_settings.secret}
|
||||||
|
onChange={(e) => handleConfigChange('webhook_settings.secret', e.target.value)}
|
||||||
|
fullWidth
|
||||||
|
helperText="Secret for webhook signature validation"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Typography variant="subtitle2">Enabled Webhook Events</Typography>
|
||||||
|
<FormGroup>
|
||||||
|
{['ticket.create', 'ticket.update', 'ticket.close', 'article.create'].map((event) => (
|
||||||
|
<FormControlLabel
|
||||||
|
key={event}
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.webhook_settings.enabled_events.includes(event)}
|
||||||
|
onChange={() => handleArrayToggle('webhook_settings.enabled_events', event)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label={event}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</FormGroup>
|
||||||
|
</Box>
|
||||||
|
</AccordionDetails>
|
||||||
|
</Accordion>
|
||||||
|
|
||||||
|
{/* Notification Settings */}
|
||||||
|
<Accordion>
|
||||||
|
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
|
||||||
|
<Typography variant="h6">Notification Settings</Typography>
|
||||||
|
</AccordionSummary>
|
||||||
|
<AccordionDetails>
|
||||||
|
<Box display="flex" flexDirection="column" gap={3}>
|
||||||
|
<FormControlLabel
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.notification_settings.email_notifications}
|
||||||
|
onChange={(e) => handleConfigChange('notification_settings.email_notifications', e.target.checked)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label="Email Notifications"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<TextField
|
||||||
|
label="Slack Webhook URL"
|
||||||
|
value={config.notification_settings.slack_webhook_url}
|
||||||
|
onChange={(e) => handleConfigChange('notification_settings.slack_webhook_url', e.target.value)}
|
||||||
|
fullWidth
|
||||||
|
placeholder="https://hooks.slack.com/services/..."
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Typography variant="subtitle2">Notification Events</Typography>
|
||||||
|
<FormGroup>
|
||||||
|
{['sync_error', 'api_error', 'new_tickets', 'summarization_complete'].map((event) => (
|
||||||
|
<FormControlLabel
|
||||||
|
key={event}
|
||||||
|
control={
|
||||||
|
<Switch
|
||||||
|
checked={config.notification_settings.notification_events.includes(event)}
|
||||||
|
onChange={() => handleArrayToggle('notification_settings.notification_events', event)}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
label={event.replace('_', ' ').replace(/\b\w/g, l => l.toUpperCase())}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</FormGroup>
|
||||||
|
</Box>
|
||||||
|
</AccordionDetails>
|
||||||
|
</Accordion>
|
||||||
|
</Box>
|
||||||
|
);
|
||||||
|
};
|
||||||
Reference in New Issue
Block a user