Mirror of https://github.com/aljazceru/enclava.git (synced 2025-12-17 07:24:34 +01:00)
Merge branch 'main' into redoing-things
.env.example (54 lines changed)

@@ -1,24 +1,34 @@
 # ===================================
-# ENCLAVA MINIMAL CONFIGURATION
+# ENCLAVA CONFIGURATION
 # ===================================
 # Only essential environment variables that CANNOT have defaults
 # Other settings should be configurable through the app UI

+# Admin user (created on first startup only)
+ADMIN_EMAIL=admin@example.com
+ADMIN_PASSWORD=admin123
+
+# ===================================
+# APPLICATION BASE URL (Required - derives all URLs and CORS)
+# ===================================
+BASE_URL=localhost
+
 # ===================================
 # INFRASTRUCTURE (Required)
 # ===================================
 DATABASE_URL=postgresql://enclava_user:enclava_pass@enclava-postgres:5432/enclava_db
 REDIS_URL=redis://enclava-redis:6379
+POSTGRES_DB=enclava_db
+POSTGRES_USER=enclava_user
+POSTGRES_PASSWORD=enclava_pass
 # ===================================
 # SECURITY CRITICAL (Required)
 # ===================================
 JWT_SECRET=your-super-secret-jwt-key-here-change-in-production
 PRIVATEMODE_API_KEY=your-privatemode-api-key-here

-# Admin user (created on first startup only)
-ADMIN_EMAIL=admin@example.com
-ADMIN_PASSWORD=admin123

 # ===================================
 # ADDITIONAL SECURITY SETTINGS (Optional but recommended)
@@ -34,29 +44,31 @@ ADMIN_PASSWORD=admin123
 # API Key prefix (default: en_)
 # API_KEY_PREFIX=en_

-# Security thresholds (0.0-1.0)
-# API_SECURITY_RISK_THRESHOLD=0.8
-# API_SECURITY_WARNING_THRESHOLD=0.6
-# API_SECURITY_ANOMALY_THRESHOLD=0.7
-
-# IP security (comma-separated for multiple IPs)
-# API_BLOCKED_IPS=
-# API_ALLOWED_IPS=

 # ===================================
-# APPLICATION BASE URL (Required - derives all URLs and CORS)
+# FRONTEND ENVIRONMENT (Required for production)
 # ===================================
-BASE_URL=localhost
-# Frontend derives: APP_URL=http://localhost, API_URL=http://localhost, WS_URL=ws://localhost
-# Backend derives: CORS_ORIGINS=["http://localhost"]
+NODE_ENV=production
+NEXT_PUBLIC_APP_NAME=Enclava
+# NEXT_PUBLIC_BASE_URL is derived from BASE_URL in Docker configuration

 # ===================================
-# DOCKER NETWORKING (Required for containers)
+# LOGGING CONFIGURATION
 # ===================================
-BACKEND_INTERNAL_PORT=8000
-FRONTEND_INTERNAL_PORT=3000
-# Hosts are fixed: enclava-backend, enclava-frontend
-# Upstreams derive: enclava-backend:8000, enclava-frontend:3000
+LOG_LLM_PROMPTS=false
+
+# For production HTTPS deployments, set:
+# BASE_URL=your-domain.com
+# The system will automatically detect HTTPS and use it for all URLs and CORS

+# ===================================
+# DOCKER NETWORKING (Optional - defaults provided)
+# ===================================
+# Internal ports use defaults: backend=8000, frontend=3000
+# Override only if you need to change these defaults:
+# BACKEND_INTERNAL_PORT=8000
+# FRONTEND_INTERNAL_PORT=3000

 # ===================================
 # QDRANT (Required for RAG)
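The JWT_SECRET and ADMIN_PASSWORD values above are placeholders from the example file. One way to generate real values before deployment (assumed commands, not part of this commit):

openssl rand -hex 32      # strong JWT_SECRET
openssl rand -base64 18   # random ADMIN_PASSWORD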
.github/workflows/build-all.yml (new file, vendored, 123 lines)

@@ -0,0 +1,123 @@
name: Build All Docker Images

on:
  push:
    tags: [ 'v*' ]

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-frontend:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata for frontend
        id: meta-frontend
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-frontend
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push frontend Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./frontend
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta-frontend.outputs.tags }}
          labels: ${{ steps.meta-frontend.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  build-backend:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Container registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata for backend
        id: meta-backend
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-backend
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Build and push backend Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta-backend.outputs.tags }}
          labels: ${{ steps.meta-backend.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  # Optional: Create a combined manifest or documentation
  document-images:
    runs-on: ubuntu-latest
    needs: [build-frontend, build-backend]
    if: github.event_name != 'pull_request'

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Generate image documentation
        run: |
          echo "# Built Images" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## Frontend Image" >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          echo "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-frontend:${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## Backend Image" >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          echo "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-backend:${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
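Once a v* tag has been pushed and this workflow has run, the images should be pullable from GHCR roughly as follows (assumed usage based on the metadata step; the tag shown is hypothetical):

docker pull ghcr.io/aljazceru/enclava-frontend:latest
docker pull ghcr.io/aljazceru/enclava-backend:v1.0.0   # or whatever tag was published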
.gitignore (vendored, 1 line changed)

@@ -65,3 +65,4 @@ frontend/.next/
 frontend/node_modules/
 node_modules/
 venv/
+
@@ -9,7 +9,7 @@ ENV PYTHONPATH=/app
 WORKDIR /app

 # Install system dependencies
-RUN apt-get update && apt-get install -y \
+RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y \
     build-essential \
     libpq-dev \
     postgresql-client \
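Setting DEBIAN_FRONTEND inline as above affects only that RUN instruction. A common alternative, sketched here as an assumption rather than something this commit does, is to scope it with a build ARG so it covers every apt-get call without being baked into the final image:

# Sketch (assumed Dockerfile snippet, not part of this commit)
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential libpq-dev postgresql-client \
    && rm -rf /var/lib/apt/lists/*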
backend/Dockerfile.prod (new file, 53 lines)

@@ -0,0 +1,53 @@
FROM python:3.11-slim

# Set environment variables
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONPATH=/app
ENV NODE_ENV=production
ENV APP_ENV=production

# Set work directory
WORKDIR /app

# Install system dependencies
RUN DEBIAN_FRONTEND=noninteractive apt-get update && apt-get install -y \
    build-essential \
    libpq-dev \
    postgresql-client \
    curl \
    ffmpeg \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Optional: Install NLP requirements if needed
# COPY requirements-nlp.txt .
# RUN pip install --no-cache-dir -r requirements-nlp.txt

# Copy application code
COPY . .

# Copy and make migration script executable
COPY scripts/migrate.sh /usr/local/bin/migrate.sh
RUN chmod +x /usr/local/bin/migrate.sh

# Create non-root user for security
RUN useradd --create-home --shell /bin/bash app && \
    chown -R app:app /app
USER app

# Create logs directory
RUN mkdir -p logs

# Expose port
EXPOSE 8000

# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:8000/health || exit 1

# Run the application in production mode
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
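A minimal way to build and run this production image locally (assumed commands and tag name, not part of the commit):

docker build -f backend/Dockerfile.prod -t enclava-backend:prod ./backend
docker run --rm -p 8000:8000 --env-file .env enclava-backend:prod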
@@ -18,6 +18,7 @@ from ..v1.plugin_registry import router as plugin_registry_router
 from ..v1.platform import router as platform_router
 from ..v1.llm_internal import router as llm_internal_router
 from ..v1.chatbot import router as chatbot_router
+from .debugging import router as debugging_router

 # Create internal API router
 internal_api_router = APIRouter()
@@ -67,3 +68,6 @@ internal_api_router.include_router(llm_internal_router, prefix="/llm", tags=["internal-llm"])

 # Include chatbot routes (frontend chatbot management)
 internal_api_router.include_router(chatbot_router, prefix="/chatbot", tags=["internal-chatbot"])
+
+# Include debugging routes (troubleshooting and diagnostics)
+internal_api_router.include_router(debugging_router, prefix="/debugging", tags=["internal-debugging"])
backend/app/api/internal_v1/debugging.py (new file, 215 lines)

@@ -0,0 +1,215 @@
"""
Debugging API endpoints for troubleshooting chatbot issues
"""
from typing import Dict, Any, List
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from sqlalchemy import text

from app.core.security import get_current_user
from app.db.database import get_db
from app.models.user import User
from app.models.chatbot import ChatbotInstance
from app.models.prompt_template import PromptTemplate
from app.models.rag_collection import RagCollection

router = APIRouter()


@router.get("/chatbot/{chatbot_id}/config")
async def get_chatbot_config_debug(
    chatbot_id: str,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Get detailed configuration for debugging a specific chatbot"""

    # Get chatbot instance
    chatbot = db.query(ChatbotInstance).filter(
        ChatbotInstance.id == chatbot_id,
        ChatbotInstance.user_id == current_user.id
    ).first()

    if not chatbot:
        raise HTTPException(status_code=404, detail="Chatbot not found")

    # Get prompt template
    prompt_template = db.query(PromptTemplate).filter(
        PromptTemplate.type == chatbot.chatbot_type
    ).first()

    # Get RAG collections if configured
    rag_collections = []
    if chatbot.rag_collection_ids:
        collection_ids = chatbot.rag_collection_ids
        if isinstance(collection_ids, str):
            import json
            try:
                collection_ids = json.loads(collection_ids)
            except:
                collection_ids = []

        if collection_ids:
            collections = db.query(RagCollection).filter(
                RagCollection.id.in_(collection_ids)
            ).all()
            rag_collections = [
                {
                    "id": col.id,
                    "name": col.name,
                    "document_count": col.document_count,
                    "qdrant_collection_name": col.qdrant_collection_name,
                    "is_active": col.is_active
                }
                for col in collections
            ]

    # Get recent conversations count
    from app.models.chatbot import ChatbotConversation
    conversation_count = db.query(ChatbotConversation).filter(
        ChatbotConversation.chatbot_instance_id == chatbot_id
    ).count()

    return {
        "chatbot": {
            "id": chatbot.id,
            "name": chatbot.name,
            "type": chatbot.chatbot_type,
            "description": chatbot.description,
            "created_at": chatbot.created_at,
            "is_active": chatbot.is_active,
            "conversation_count": conversation_count
        },
        "prompt_template": {
            "type": prompt_template.type if prompt_template else None,
            "system_prompt": prompt_template.system_prompt if prompt_template else None,
            "variables": prompt_template.variables if prompt_template else []
        },
        "rag_collections": rag_collections,
        "configuration": {
            "max_tokens": chatbot.max_tokens,
            "temperature": chatbot.temperature,
            "streaming": chatbot.streaming,
            "memory_config": chatbot.memory_config
        }
    }


@router.get("/chatbot/{chatbot_id}/test-rag")
async def test_rag_search(
    chatbot_id: str,
    query: str = "test query",
    top_k: int = 5,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Test RAG search for a specific chatbot"""

    # Get chatbot instance
    chatbot = db.query(ChatbotInstance).filter(
        ChatbotInstance.id == chatbot_id,
        ChatbotInstance.user_id == current_user.id
    ).first()

    if not chatbot:
        raise HTTPException(status_code=404, detail="Chatbot not found")

    # Test RAG search
    try:
        from app.modules.rag.main import rag_module

        # Get collection IDs
        collection_ids = []
        if chatbot.rag_collection_ids:
            if isinstance(chatbot.rag_collection_ids, str):
                import json
                try:
                    collection_ids = json.loads(chatbot.rag_collection_ids)
                except:
                    pass
            elif isinstance(chatbot.rag_collection_ids, list):
                collection_ids = chatbot.rag_collection_ids

        if not collection_ids:
            return {
                "query": query,
                "results": [],
                "message": "No RAG collections configured for this chatbot"
            }

        # Perform search
        search_results = await rag_module.search(
            query=query,
            collection_ids=collection_ids,
            top_k=top_k,
            score_threshold=0.5
        )

        return {
            "query": query,
            "results": search_results,
            "collections_searched": collection_ids,
            "result_count": len(search_results)
        }

    except Exception as e:
        return {
            "query": query,
            "results": [],
            "error": str(e),
            "message": "RAG search failed"
        }


@router.get("/system/status")
async def get_system_status(
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Get system status for debugging"""

    # Check database connectivity
    try:
        db.execute(text("SELECT 1"))
        db_status = "healthy"
    except Exception as e:
        db_status = f"error: {str(e)}"

    # Check module status
    module_status = {}
    try:
        from app.services.module_manager import module_manager
        modules = module_manager.list_modules()
        for module_name, module_info in modules.items():
            module_status[module_name] = {
                "status": module_info.get("status", "unknown"),
                "enabled": module_info.get("enabled", False)
            }
    except Exception as e:
        module_status = {"error": str(e)}

    # Check Redis (if configured)
    redis_status = "not configured"
    try:
        from app.core.cache import core_cache
        await core_cache.ping()
        redis_status = "healthy"
    except Exception as e:
        redis_status = f"error: {str(e)}"

    # Check Qdrant (if configured)
    qdrant_status = "not configured"
    try:
        from app.services.qdrant_service import qdrant_service
        collections = await qdrant_service.list_collections()
        qdrant_status = f"healthy ({len(collections)} collections)"
    except Exception as e:
        qdrant_status = f"error: {str(e)}"

    return {
        "database": db_status,
        "modules": module_status,
        "redis": redis_status,
        "qdrant": qdrant_status,
        "timestamp": "UTC"
    }
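A quick way to exercise these endpoints once the router is mounted. The /api-internal/v1 prefix is an assumption based on the login path referenced elsewhere in this commit, and the chatbot id and token are placeholders:

curl -H "Authorization: Bearer $TOKEN" http://localhost:8000/api-internal/v1/debugging/system/status
curl -H "Authorization: Bearer $TOKEN" "http://localhost:8000/api-internal/v1/debugging/chatbot/<chatbot-id>/test-rag?query=hello&top_k=3"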
@@ -1,7 +1,6 @@
-"""
-Authentication API endpoints
-"""
+"""Authentication API endpoints"""

+import logging
 from datetime import datetime, timedelta
 from typing import Optional

@@ -12,6 +11,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select

 from app.core.config import settings
+from app.core.logging import get_logger
 from app.core.security import (
     verify_password,
     get_password_hash,
@@ -25,6 +25,8 @@ from app.db.database import get_db
 from app.models.user import User
 from app.utils.exceptions import AuthenticationError, ValidationError

+logger = get_logger(__name__)
+
 router = APIRouter()
 security = HTTPBearer()

@@ -159,17 +161,71 @@ async def login(
 ):
     """Login user and return access tokens"""

+    logger.info(
+        "LOGIN_DEBUG_START",
+        request_time=datetime.utcnow().isoformat(),
+        email=user_data.email,
+        database_url="SET" if settings.DATABASE_URL else "NOT SET",
+        jwt_secret="SET" if settings.JWT_SECRET else "NOT SET",
+        admin_email=settings.ADMIN_EMAIL,
+        bcrypt_rounds=settings.BCRYPT_ROUNDS,
+    )
+
+    start_time = datetime.utcnow()
+
     # Get user by email
+    logger.info("LOGIN_USER_QUERY_START")
+    query_start = datetime.utcnow()
     stmt = select(User).where(User.email == user_data.email)
     result = await db.execute(stmt)
+    query_end = datetime.utcnow()
+    logger.info(
+        "LOGIN_USER_QUERY_END",
+        duration_seconds=(query_end - query_start).total_seconds(),
+    )
+
     user = result.scalar_one_or_none()

-    if not user or not verify_password(user_data.password, user.hashed_password):
+    if not user:
+        logger.warning("LOGIN_USER_NOT_FOUND", email=user_data.email)
+        # List available users for debugging
+        try:
+            all_users_stmt = select(User).limit(5)
+            all_users_result = await db.execute(all_users_stmt)
+            all_users = all_users_result.scalars().all()
+            logger.info(
+                "LOGIN_USER_LIST",
+                users=[u.email for u in all_users],
+            )
+        except Exception as e:
+            logger.error("LOGIN_USER_LIST_FAILURE", error=str(e))
+
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
             detail="Incorrect email or password"
         )
+
+    logger.info("LOGIN_USER_FOUND", email=user.email, is_active=user.is_active)
+    logger.info("LOGIN_PASSWORD_VERIFY_START")
+    verify_start = datetime.utcnow()
+
+    if not verify_password(user_data.password, user.hashed_password):
+        verify_end = datetime.utcnow()
+        logger.warning(
+            "LOGIN_PASSWORD_VERIFY_FAILURE",
+            duration_seconds=(verify_end - verify_start).total_seconds(),
+        )
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Incorrect email or password"
+        )
+
+    verify_end = datetime.utcnow()
+    logger.info(
+        "LOGIN_PASSWORD_VERIFY_SUCCESS",
+        duration_seconds=(verify_end - verify_start).total_seconds(),
+    )
+
     if not user.is_active:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -177,11 +233,21 @@ async def login(
         )

     # Update last login
+    logger.info("LOGIN_LAST_LOGIN_UPDATE_START")
+    update_start = datetime.utcnow()
     user.update_last_login()
     await db.commit()
+    update_end = datetime.utcnow()
+    logger.info(
+        "LOGIN_LAST_LOGIN_UPDATE_SUCCESS",
+        duration_seconds=(update_end - update_start).total_seconds(),
+    )

     # Create tokens
+    logger.info("LOGIN_TOKEN_CREATE_START")
+    token_start = datetime.utcnow()
     access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)

     access_token = create_access_token(
         data={
             "sub": str(user.id),
@@ -195,6 +261,17 @@ async def login(
     refresh_token = create_refresh_token(
         data={"sub": str(user.id), "type": "refresh"}
     )
+    token_end = datetime.utcnow()
+    logger.info(
+        "LOGIN_TOKEN_CREATE_SUCCESS",
+        duration_seconds=(token_end - token_start).total_seconds(),
+    )
+
+    total_time = datetime.utcnow() - start_time
+    logger.info(
+        "LOGIN_DEBUG_COMPLETE",
+        total_duration_seconds=total_time.total_seconds(),
+    )
+
     return TokenResponse(
         access_token=access_token,
@@ -234,6 +311,10 @@ async def refresh_token(

     # Create new access token
     access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+    logger.info(f"REFRESH: Creating new access token with expiration: {access_token_expires}")
+    logger.info(f"REFRESH: ACCESS_TOKEN_EXPIRE_MINUTES from settings: {settings.ACCESS_TOKEN_EXPIRE_MINUTES}")
+    logger.info(f"REFRESH: Current UTC time: {datetime.utcnow().isoformat()}")
+
     access_token = create_access_token(
         data={
             "sub": str(user.id),
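A minimal sketch of exercising the instrumented login endpoint, assuming the /api-internal/v1/auth/login path referenced by the debugging middleware in this commit and the email/password field names used by the handler (the actual request schema may differ):

import httpx

resp = httpx.post(
    "http://localhost:8000/api-internal/v1/auth/login",
    json={"email": "admin@example.com", "password": "admin123"},
)
# TokenResponse should carry the access token on success
print(resp.status_code, resp.json().get("access_token", "")[:20])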
@@ -24,22 +24,23 @@ class Settings(BaseSettings):
     LOG_LLM_PROMPTS: bool = os.getenv("LOG_LLM_PROMPTS", "False").lower() == "true"  # Set to True to log prompts and context sent to LLM

     # Database
-    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql://empire_user:empire_pass@localhost:5432/empire_db")
+    DATABASE_URL: str = os.getenv("DATABASE_URL")

     # Redis
     REDIS_URL: str = os.getenv("REDIS_URL", "redis://localhost:6379")

     # Security
-    JWT_SECRET: str = os.getenv("JWT_SECRET", "your-super-secret-jwt-key-here")
+    JWT_SECRET: str = os.getenv("JWT_SECRET")
     JWT_ALGORITHM: str = os.getenv("JWT_ALGORITHM", "HS256")
-    ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "30"))
+    ACCESS_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "1440"))  # 24 hours
     REFRESH_TOKEN_EXPIRE_MINUTES: int = int(os.getenv("REFRESH_TOKEN_EXPIRE_MINUTES", "10080"))  # 7 days
     SESSION_EXPIRE_MINUTES: int = int(os.getenv("SESSION_EXPIRE_MINUTES", "1440"))  # 24 hours
     API_KEY_PREFIX: str = os.getenv("API_KEY_PREFIX", "en_")
+    BCRYPT_ROUNDS: int = int(os.getenv("BCRYPT_ROUNDS", "6"))  # Bcrypt work factor - lower for production performance

     # Admin user provisioning (used only on first startup)
-    ADMIN_EMAIL: str = os.getenv("ADMIN_EMAIL", "admin@example.com")
-    ADMIN_PASSWORD: str = os.getenv("ADMIN_PASSWORD", "admin123")
+    ADMIN_EMAIL: str = os.getenv("ADMIN_EMAIL")
+    ADMIN_PASSWORD: str = os.getenv("ADMIN_PASSWORD")

     # Base URL for deriving CORS origins
     BASE_URL: str = os.getenv("BASE_URL", "localhost")
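With the defaults removed, os.getenv returns None when DATABASE_URL, JWT_SECRET, ADMIN_EMAIL, or ADMIN_PASSWORD are unset, and that None propagates into the settings. A fail-fast guard along these lines is one option (a sketch under that assumption, not something this commit adds):

# Hypothetical startup guard; variable list taken from the fields above.
import os

REQUIRED_VARS = ("DATABASE_URL", "JWT_SECRET", "ADMIN_EMAIL", "ADMIN_PASSWORD")
missing = [name for name in REQUIRED_VARS if not os.getenv(name)]
if missing:
    raise RuntimeError(f"Missing required environment variables: {', '.join(missing)}")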
@@ -50,7 +51,8 @@ class Settings(BaseSettings):
         """Derive CORS origins from BASE_URL if not explicitly set"""
         if v is None:
             base_url = info.data.get('BASE_URL', 'localhost')
-            return [f"http://{base_url}"]
+            # Support both HTTP and HTTPS for production environments
+            return [f"http://{base_url}", f"https://{base_url}"]
         return v if isinstance(v, list) else [v]

     # CORS origins (derived from BASE_URL)
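The effect of the change, reproduced outside the pydantic validator as a standalone illustration:

base_url = "example.com"
cors_origins = [f"http://{base_url}", f"https://{base_url}"]
print(cors_origins)  # ['http://example.com', 'https://example.com']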
@@ -152,3 +154,4 @@ class Settings(BaseSettings):

 # Global settings instance
 settings = Settings()
+
@@ -2,6 +2,8 @@
 Security utilities for authentication and authorization
 """

+import asyncio
+import concurrent.futures
 import logging
 from datetime import datetime, timedelta
 from typing import Optional, Dict, Any
@@ -20,14 +22,47 @@ from app.utils.exceptions import AuthenticationError, AuthorizationError
 logger = logging.getLogger(__name__)

 # Password hashing
-pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+# Use a lower work factor for better performance in production
+pwd_context = CryptContext(
+    schemes=["bcrypt"],
+    deprecated="auto",
+    bcrypt__rounds=settings.BCRYPT_ROUNDS
+)

 # JWT token handling
 security = HTTPBearer()

 def verify_password(plain_password: str, hashed_password: str) -> bool:
     """Verify a password against its hash"""
-    return pwd_context.verify(plain_password, hashed_password)
+    import time
+
+    start_time = time.time()
+    logger.info(f"=== PASSWORD VERIFICATION START === BCRYPT_ROUNDS: {settings.BCRYPT_ROUNDS}")
+
+    try:
+        # Run password verification in a thread with timeout
+        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+            future = executor.submit(pwd_context.verify, plain_password, hashed_password)
+            result = future.result(timeout=5.0)  # 5 second timeout
+
+        end_time = time.time()
+        duration = end_time - start_time
+        logger.info(f"=== PASSWORD VERIFICATION END === Duration: {duration:.3f}s, Result: {result}")
+
+        if duration > 1:
+            logger.warning(f"PASSWORD VERIFICATION TOOK TOO LONG: {duration:.3f}s")
+
+        return result
+    except concurrent.futures.TimeoutError:
+        end_time = time.time()
+        duration = end_time - start_time
+        logger.error(f"=== PASSWORD VERIFICATION TIMEOUT === Duration: {duration:.3f}s")
+        return False  # Treat timeout as verification failure
+    except Exception as e:
+        end_time = time.time()
+        duration = end_time - start_time
+        logger.error(f"=== PASSWORD VERIFICATION FAILED === Duration: {duration:.3f}s, Error: {e}")
+        raise

 def get_password_hash(password: str) -> str:
     """Generate password hash"""
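For context on the BCRYPT_ROUNDS trade-off: passlib's bcrypt default is 12 rounds, and each extra round roughly doubles the verification time, so 6 rounds is much faster but also much cheaper to brute-force offline. A small standalone timing sketch (assumed script, not part of the commit; requires passlib with bcrypt installed):

# Rough timing of bcrypt verification at different work factors.
import time
from passlib.context import CryptContext

for rounds in (6, 10, 12):
    ctx = CryptContext(schemes=["bcrypt"], bcrypt__rounds=rounds)
    hashed = ctx.hash("correct horse battery staple")
    start = time.perf_counter()
    ctx.verify("correct horse battery staple", hashed)
    print(f"rounds={rounds}: {time.perf_counter() - start:.3f}s")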
@@ -43,15 +78,42 @@ def get_api_key_hash(api_key: str) -> str:

 def create_access_token(data: Dict[str, Any], expires_delta: Optional[timedelta] = None) -> str:
     """Create JWT access token"""
-    to_encode = data.copy()
-    if expires_delta:
-        expire = datetime.utcnow() + expires_delta
-    else:
-        expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
-    to_encode.update({"exp": expire})
-    encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET, algorithm=settings.JWT_ALGORITHM)
-    return encoded_jwt
+    import time
+    start_time = time.time()
+    logger.info(f"=== CREATE ACCESS TOKEN START ===")
+
+    try:
+        to_encode = data.copy()
+        if expires_delta:
+            expire = datetime.utcnow() + expires_delta
+        else:
+            expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
+
+        to_encode.update({"exp": expire})
+        logger.info(f"JWT encode start...")
+        encode_start = time.time()
+        encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET, algorithm=settings.JWT_ALGORITHM)
+        encode_end = time.time()
+        encode_duration = encode_end - encode_start
+
+        end_time = time.time()
+        total_duration = end_time - start_time
+
+        # Log token creation details
+        logger.info(f"Created access token for user {data.get('sub')}")
+        logger.info(f"Token expires at: {expire.isoformat()} (UTC)")
+        logger.info(f"Current UTC time: {datetime.utcnow().isoformat()}")
+        logger.info(f"ACCESS_TOKEN_EXPIRE_MINUTES setting: {settings.ACCESS_TOKEN_EXPIRE_MINUTES}")
+        logger.info(f"JWT encode duration: {encode_duration:.3f}s")
+        logger.info(f"Total token creation duration: {total_duration:.3f}s")
+        logger.info(f"=== CREATE ACCESS TOKEN END ===")
+
+        return encoded_jwt
+    except Exception as e:
+        end_time = time.time()
+        total_duration = end_time - start_time
+        logger.error(f"=== CREATE ACCESS TOKEN FAILED === Duration: {total_duration:.3f}s, Error: {e}")
+        raise

 def create_refresh_token(data: Dict[str, Any]) -> str:
     """Create JWT refresh token"""
|
|||||||
def verify_token(token: str) -> Dict[str, Any]:
|
def verify_token(token: str) -> Dict[str, Any]:
|
||||||
"""Verify JWT token and return payload"""
|
"""Verify JWT token and return payload"""
|
||||||
try:
|
try:
|
||||||
|
# Log current time before verification
|
||||||
|
current_time = datetime.utcnow()
|
||||||
|
logger.info(f"Verifying token at: {current_time.isoformat()} (UTC)")
|
||||||
|
|
||||||
|
# Decode without verification first to check expiration
|
||||||
|
try:
|
||||||
|
unverified_payload = jwt.get_unverified_claims(token)
|
||||||
|
exp_timestamp = unverified_payload.get('exp')
|
||||||
|
if exp_timestamp:
|
||||||
|
exp_datetime = datetime.fromtimestamp(exp_timestamp, tz=None)
|
||||||
|
logger.info(f"Token expiration time: {exp_datetime.isoformat()} (UTC)")
|
||||||
|
logger.info(f"Time until expiration: {(exp_datetime - current_time).total_seconds()} seconds")
|
||||||
|
except Exception as decode_error:
|
||||||
|
logger.warning(f"Could not decode token for expiration check: {decode_error}")
|
||||||
|
|
||||||
payload = jwt.decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM])
|
payload = jwt.decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM])
|
||||||
|
logger.info(f"Token verified successfully for user {payload.get('sub')}")
|
||||||
return payload
|
return payload
|
||||||
except JWTError as e:
|
except JWTError as e:
|
||||||
logger.warning(f"Token verification failed: {e}")
|
logger.warning(f"Token verification failed: {e}")
|
||||||
|
logger.warning(f"Current UTC time: {datetime.utcnow().isoformat()}")
|
||||||
raise AuthenticationError("Invalid token")
|
raise AuthenticationError("Invalid token")
|
||||||
|
|
||||||
async def get_current_user(
|
async def get_current_user(
|
||||||
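For reference, the unverified-claims peek used above can be reproduced standalone with python-jose; note that get_unverified_claims performs no signature check, so it is for inspection only (assumed snippet, not from the commit; the token string is a placeholder):

from datetime import datetime, timezone
from jose import jwt

token = "<paste a JWT here>"
claims = jwt.get_unverified_claims(token)  # inspection only, no signature verification
exp = datetime.fromtimestamp(claims["exp"], tz=timezone.utc)
print("expires at", exp.isoformat())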
@@ -76,6 +155,10 @@ async def get_current_user(
 ) -> Dict[str, Any]:
     """Get current user from JWT token"""
     try:
+        # Log server time for debugging clock sync issues
+        server_time = datetime.utcnow()
+        logger.info(f"get_current_user called at: {server_time.isoformat()} (UTC)")
+
         payload = verify_token(credentials.credentials)
         user_id: str = payload.get("sub")
         if user_id is None:
@@ -24,6 +24,7 @@ engine = create_async_engine(
     pool_recycle=3600,  # Recycle connections every hour
     pool_timeout=30,  # Max time to get connection from pool
     connect_args={
+        "timeout": 5,
         "command_timeout": 5,
         "server_settings": {
             "application_name": "enclava_backend",
@@ -49,6 +50,7 @@ sync_engine = create_engine(
     pool_recycle=3600,  # Recycle connections every hour
     pool_timeout=30,  # Max time to get connection from pool
     connect_args={
+        "connect_timeout": 5,
         "application_name": "enclava_backend_sync",
     },
 )
@@ -68,17 +70,33 @@ metadata = MetaData()

 async def get_db() -> AsyncGenerator[AsyncSession, None]:
     """Get database session"""
-    async with async_session_factory() as session:
-        try:
-            yield session
-        except Exception as e:
-            # Only log if there's an actual error, not normal operation
-            if str(e).strip():  # Only log if error message exists
-                logger.error(f"Database session error: {str(e)}", exc_info=True)
-            await session.rollback()
-            raise
-        finally:
-            await session.close()
+    import time
+    start_time = time.time()
+    request_id = f"db_{int(time.time() * 1000)}"
+
+    logger.info(f"[{request_id}] === DATABASE SESSION START ===")
+
+    try:
+        logger.info(f"[{request_id}] Creating database session...")
+        async with async_session_factory() as session:
+            logger.info(f"[{request_id}] Database session created successfully")
+            try:
+                yield session
+            except Exception as e:
+                # Only log if there's an actual error, not normal operation
+                if str(e).strip():  # Only log if error message exists
+                    logger.error(f"[{request_id}] Database session error: {str(e)}", exc_info=True)
+                await session.rollback()
+                raise
+            finally:
+                close_start = time.time()
+                await session.close()
+                close_time = time.time() - close_start
+                total_time = time.time() - start_time
+                logger.info(f"[{request_id}] Database session closed. Close time: {close_time:.3f}s, Total time: {total_time:.3f}s")
+    except Exception as e:
+        logger.error(f"[{request_id}] Failed to create database session: {e}", exc_info=True)
+        raise


 async def init_db():
@@ -1,9 +1,9 @@
-"""
-Main FastAPI application entry point
-"""
+"""Main FastAPI application entry point"""

+import asyncio
 import logging
 import sys
+import time
 from contextlib import asynccontextmanager

 from fastapi import FastAPI
@@ -14,10 +14,13 @@ from fastapi.exceptions import RequestValidationError
 from starlette.exceptions import HTTPException
 from starlette.middleware.sessions import SessionMiddleware

+from sqlalchemy import select
+from sqlalchemy.exc import SQLAlchemyError
+
 from app.core.config import settings
 from app.core.logging import setup_logging
 from app.core.security import get_current_user
-from app.db.database import init_db
+from app.db.database import init_db, async_session_factory
 from app.api.internal_v1 import internal_api_router
 from app.api.public_v1 import public_api_router
 from app.utils.exceptions import CustomHTTPException
@@ -32,6 +35,68 @@ setup_logging()
 logger = logging.getLogger(__name__)


+async def _check_redis_startup():
+    """Validate Redis connectivity during startup."""
+    if not settings.REDIS_URL:
+        logger.info("Startup Redis check skipped: REDIS_URL not configured")
+        return
+
+    try:
+        import redis.asyncio as redis
+    except ModuleNotFoundError:
+        logger.warning("Startup Redis check skipped: redis library not installed")
+        return
+
+    client = redis.from_url(
+        settings.REDIS_URL,
+        socket_connect_timeout=1.0,
+        socket_timeout=1.0,
+    )
+
+    start = time.perf_counter()
+    try:
+        await asyncio.wait_for(client.ping(), timeout=3.0)
+        duration = time.perf_counter() - start
+        logger.info(
+            "Startup Redis check succeeded",
+            extra={"redis_url": settings.REDIS_URL, "duration_seconds": round(duration, 3)},
+        )
+    except Exception as exc:  # noqa: BLE001
+        logger.warning(
+            "Startup Redis check failed",
+            extra={"error": str(exc), "redis_url": settings.REDIS_URL},
+        )
+    finally:
+        await client.close()
+
+
+async def _check_database_startup():
+    """Validate database connectivity during startup."""
+    start = time.perf_counter()
+    try:
+        async with async_session_factory() as session:
+            await asyncio.wait_for(session.execute(select(1)), timeout=3.0)
+        duration = time.perf_counter() - start
+        logger.info(
+            "Startup database check succeeded",
+            extra={"duration_seconds": round(duration, 3)},
+        )
+    except (asyncio.TimeoutError, SQLAlchemyError) as exc:
+        logger.error(
+            "Startup database check failed",
+            extra={"error": str(exc)},
+        )
+        raise
+
+
+async def run_startup_dependency_checks():
+    """Run dependency checks once during application startup."""
+    logger.info("Running startup dependency checks...")
+    await _check_redis_startup()
+    await _check_database_startup()
+    logger.info("Startup dependency checks complete")
+
+
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     """
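The same checks can be run outside the app lifecycle, for example from a one-off script. This assumes the helpers live in app/main.py, as the Dockerfile's app.main:app entrypoint suggests:

import asyncio
from app.main import run_startup_dependency_checks

# Exits non-zero if the database check raises; Redis failures only log a warning.
asyncio.run(run_startup_dependency_checks())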
@@ -47,6 +112,13 @@ async def lifespan(app: FastAPI):
     except Exception as e:
         logger.warning(f"Core cache service initialization failed: {e}")

+    # Run one-time dependency checks (non-blocking for auth requests)
+    try:
+        await run_startup_dependency_checks()
+    except Exception:
+        logger.error("Critical dependency check failed during startup")
+        raise
+
     # Initialize database
     await init_db()

@@ -65,8 +137,16 @@ async def lifespan(app: FastAPI):
     init_analytics_service()

     # Initialize module manager with FastAPI app for router registration
+    logger.info("Initializing module manager...")
     await module_manager.initialize(app)
     app.state.module_manager = module_manager
+    logger.info("Module manager initialized successfully")
+
+    # Initialize permission registry
+    logger.info("Initializing permission registry...")
+    from app.services.permission_manager import permission_registry
+    permission_registry.register_platform_permissions()
+    logger.info("Permission registry initialized successfully")
+
     # Initialize document processor
     from app.services.document_processor import document_processor
backend/app/middleware/debugging.py (new file, 125 lines)

@@ -0,0 +1,125 @@
"""
Debugging middleware for detailed request/response logging
"""
import json
import time
from datetime import datetime
from typing import Any, Dict, Optional
from fastapi import Request, Response
from fastapi.responses import JSONResponse
from starlette.middleware.base import BaseHTTPMiddleware
from uuid import uuid4

from app.core.logging import get_logger

logger = get_logger(__name__)


class DebuggingMiddleware(BaseHTTPMiddleware):
    """Middleware to log detailed request/response information for debugging"""

    async def dispatch(self, request: Request, call_next):
        # Generate unique request ID for tracing
        request_id = str(uuid4())

        # Skip debugging for health checks and static files
        if request.url.path in ["/health", "/docs", "/redoc", "/openapi.json"] or \
           request.url.path.startswith("/static"):
            return await call_next(request)

        # Log request details
        request_body = None
        if request.method in ["POST", "PUT", "PATCH"]:
            try:
                # Clone request body to avoid consuming it
                body_bytes = await request.body()
                if body_bytes:
                    try:
                        request_body = json.loads(body_bytes)
                    except json.JSONDecodeError:
                        request_body = body_bytes.decode('utf-8', errors='replace')
                    # Restore body for downstream processing
                    request._body = body_bytes
            except Exception:
                request_body = "[Failed to read request body]"

        # Extract headers we care about
        headers_to_log = {
            "authorization": request.headers.get("Authorization", "")[:50] + "..." if
                request.headers.get("Authorization") else None,
            "content-type": request.headers.get("Content-Type"),
            "user-agent": request.headers.get("User-Agent"),
            "x-forwarded-for": request.headers.get("X-Forwarded-For"),
            "x-real-ip": request.headers.get("X-Real-IP"),
        }

        # Log request
        logger.info("=== API REQUEST DEBUG ===", extra={
            "request_id": request_id,
            "method": request.method,
            "url": str(request.url),
            "path": request.url.path,
            "query_params": dict(request.query_params),
            "headers": {k: v for k, v in headers_to_log.items() if v is not None},
            "body": request_body,
            "client_ip": request.client.host if request.client else None,
            "timestamp": datetime.utcnow().isoformat()
        })

        # Process the request
        start_time = time.time()
        response = None
        response_body = None

        # Add timeout detection
        try:
            logger.info(f"=== START PROCESSING REQUEST === {request_id} at {datetime.utcnow().isoformat()}")
            logger.info(f"Request path: {request.url.path}")
            logger.info(f"Request method: {request.method}")

            # Check if this is the login endpoint
            if request.url.path == "/api-internal/v1/auth/login" and request.method == "POST":
                logger.info(f"=== LOGIN REQUEST DETECTED === {request_id}")

            response = await call_next(request)
            logger.info(f"=== REQUEST COMPLETED === {request_id} at {datetime.utcnow().isoformat()}")

            # Capture response body for successful JSON responses
            if response.status_code < 400 and isinstance(response, JSONResponse):
                try:
                    response_body = json.loads(response.body.decode('utf-8'))
                except:
                    response_body = "[Failed to decode response body]"

        except Exception as e:
            logger.error(f"Request processing failed: {str(e)}", extra={
                "request_id": request_id,
                "error": str(e),
                "error_type": type(e).__name__
            })
            response = JSONResponse(
                status_code=500,
                content={"error": "INTERNAL_ERROR", "message": "Internal server error"}
            )

        # Calculate timing
        end_time = time.time()
        duration = (end_time - start_time) * 1000  # milliseconds

        # Log response
        logger.info("=== API RESPONSE DEBUG ===", extra={
            "request_id": request_id,
            "status_code": response.status_code,
            "duration_ms": round(duration, 2),
            "response_body": response_body,
            "response_headers": dict(response.headers),
            "timestamp": datetime.utcnow().isoformat()
        })

        return response


def setup_debugging_middleware(app):
    """Add debugging middleware to the FastAPI app"""
    app.add_middleware(DebuggingMiddleware)
    logger.info("Debugging middleware configured")
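The file defines the middleware and a setup helper, but the captured hunks do not show where it gets registered. A minimal wiring sketch under that assumption:

from fastapi import FastAPI
from app.middleware.debugging import setup_debugging_middleware

app = FastAPI()
# Logs request/response details for every route except health checks and static files.
setup_debugging_middleware(app)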
@@ -1,6 +1,6 @@
 """
 Configuration Management Service - Core App Integration
-Provides centralized configuration management with hot-reloading and encryption.
+Provides centralized configuration management with hot-reloading.
 """
 import asyncio
 import json
@@ -12,7 +12,6 @@ from typing import Dict, Any, Optional, List, Union, Callable
 from pathlib import Path
 from dataclasses import dataclass, asdict
 from datetime import datetime
-from cryptography.fernet import Fernet
 import yaml
 import logging
 from watchdog.observers import Observer
@@ -50,7 +49,6 @@ class ConfigStats:
     total_configs: int
     active_watchers: int
     config_versions: int
-    encrypted_configs: int
     hot_reloads_performed: int
     validation_errors: int
     last_reload_time: datetime
@@ -111,7 +109,6 @@ class ConfigManager:
         self.schemas: Dict[str, ConfigSchema] = {}
         self.versions: Dict[str, List[ConfigVersion]] = {}
         self.watchers: Dict[str, Observer] = {}
-        self.encrypted_configs: set = set()
         self.config_paths: Dict[str, Path] = {}
         self.environment = os.getenv('ENVIRONMENT', 'development')
         self.start_time = time.time()
@@ -119,17 +116,12 @@ class ConfigManager:
             total_configs=0,
             active_watchers=0,
             config_versions=0,
-            encrypted_configs=0,
             hot_reloads_performed=0,
             validation_errors=0,
             last_reload_time=datetime.now(),
             uptime=0
         )

-        # Initialize encryption key
-        self.encryption_key = self._get_or_create_encryption_key()
-        self.cipher = Fernet(self.encryption_key)
-
         # Base configuration directories
         self.config_base_dir = Path("configs")
         self.config_base_dir.mkdir(exist_ok=True)
@@ -140,19 +132,6 @@ class ConfigManager:
|
|||||||
|
|
||||||
logger.info(f"ConfigManager initialized for environment: {self.environment}")
|
logger.info(f"ConfigManager initialized for environment: {self.environment}")
|
||||||
|
|
||||||
def _get_or_create_encryption_key(self) -> bytes:
|
|
||||||
"""Get or create encryption key for sensitive configurations"""
|
|
||||||
key_file = Path(".config_encryption_key")
|
|
||||||
|
|
||||||
if key_file.exists():
|
|
||||||
return key_file.read_bytes()
|
|
||||||
else:
|
|
||||||
key = Fernet.generate_key()
|
|
||||||
key_file.write_bytes(key)
|
|
||||||
key_file.chmod(0o600) # Restrict permissions
|
|
||||||
logger.info("Generated new encryption key for configuration management")
|
|
||||||
return key
|
|
||||||
|
|
||||||
def register_schema(self, name: str, schema: ConfigSchema):
|
def register_schema(self, name: str, schema: ConfigSchema):
|
||||||
"""Register a configuration schema for validation"""
|
"""Register a configuration schema for validation"""
|
||||||
self.schemas[name] = schema
|
self.schemas[name] = schema
|
||||||
@@ -231,7 +210,7 @@ class ConfigManager:
|
|||||||
return version
|
return version
|
||||||
|
|
||||||
async def set_config(self, name: str, config_data: Dict[str, Any],
|
async def set_config(self, name: str, config_data: Dict[str, Any],
|
||||||
encrypted: bool = False, description: str = "Manual update") -> bool:
|
description: str = "Manual update") -> bool:
|
||||||
"""Set configuration with validation and versioning"""
|
"""Set configuration with validation and versioning"""
|
||||||
try:
|
try:
|
||||||
# Validate configuration
|
# Validate configuration
|
||||||
@@ -241,16 +220,12 @@ class ConfigManager:
|
|||||||
# Create version before updating
|
# Create version before updating
|
||||||
self._create_version(name, config_data, description)
|
self._create_version(name, config_data, description)
|
||||||
|
|
||||||
# Handle encryption if requested
|
|
||||||
if encrypted:
|
|
||||||
self.encrypted_configs.add(name)
|
|
||||||
|
|
||||||
# Store configuration
|
# Store configuration
|
||||||
self.configs[name] = config_data.copy()
|
self.configs[name] = config_data.copy()
|
||||||
self.stats.total_configs = len(self.configs)
|
self.stats.total_configs = len(self.configs)
|
||||||
|
|
||||||
# Save to file
|
# Save to file
|
||||||
await self._save_config_to_file(name, config_data, encrypted)
|
await self._save_config_to_file(name, config_data)
|
||||||
|
|
||||||
logger.info(f"Configuration '{name}' updated successfully")
|
logger.info(f"Configuration '{name}' updated successfully")
|
||||||
return True
|
return True
|
||||||
@@ -288,22 +263,15 @@ class ConfigManager:
|
|||||||
except (KeyError, TypeError):
|
except (KeyError, TypeError):
|
||||||
return default
|
return default
|
||||||
|
|
||||||
async def _save_config_to_file(self, name: str, config_data: Dict[str, Any], encrypted: bool = False):
|
async def _save_config_to_file(self, name: str, config_data: Dict[str, Any]):
|
||||||
"""Save configuration to file"""
|
"""Save configuration to file"""
|
||||||
file_path = self.env_config_dir / f"{name}.json"
|
file_path = self.env_config_dir / f"{name}.json"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if encrypted:
|
# Save as regular JSON
|
||||||
# Encrypt sensitive data
|
with open(file_path, 'w') as f:
|
||||||
json_str = json.dumps(config_data, indent=2)
|
json.dump(config_data, f, indent=2)
|
||||||
encrypted_data = self.cipher.encrypt(json_str.encode())
|
logger.debug(f"Saved config '{name}' to {file_path}")
|
||||||
file_path.write_bytes(encrypted_data)
|
|
||||||
logger.debug(f"Saved encrypted config '{name}' to {file_path}")
|
|
||||||
else:
|
|
||||||
# Save as regular JSON
|
|
||||||
with open(file_path, 'w') as f:
|
|
||||||
json.dump(config_data, f, indent=2)
|
|
||||||
logger.debug(f"Saved config '{name}' to {file_path}")
|
|
||||||
|
|
||||||
self.config_paths[name] = file_path
|
self.config_paths[name] = file_path
|
||||||
|
|
||||||
@@ -319,15 +287,9 @@ class ConfigManager:
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if name in self.encrypted_configs:
|
# Load regular JSON
|
||||||
# Decrypt sensitive data
|
with open(file_path, 'r') as f:
|
||||||
encrypted_data = file_path.read_bytes()
|
return json.load(f)
|
||||||
decrypted_data = self.cipher.decrypt(encrypted_data)
|
|
||||||
return json.loads(decrypted_data.decode())
|
|
||||||
else:
|
|
||||||
# Load regular JSON
|
|
||||||
with open(file_path, 'r') as f:
|
|
||||||
return json.load(f)
|
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error loading config '{name}' from file: {str(e)}")
|
logger.error(f"Error loading config '{name}' from file: {str(e)}")
|
||||||
|
|||||||
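With the Fernet path removed, every configuration is persisted as plain JSON under the manager's environment config directory. A rough usage sketch of the simplified set_config signature; the config name and payload below are invented for illustration and the manager instance is assumed to be available:

# Hypothetical usage sketch: config name and payload are made up.
async def update_provider_config(manager):
    ok = await manager.set_config(
        "llm_providers",                                              # hypothetical config name
        {"default_provider": "privatemode", "timeout_seconds": 30},   # hypothetical payload
        description="Switch default provider",
    )
    return ok  # True on success; the data lands in the env config dir as unencrypted JSON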
@@ -71,10 +71,10 @@ class ChatResponse(BaseModel):
     usage: Optional[TokenUsage] = Field(None, description="Token usage")
     system_fingerprint: Optional[str] = Field(None, description="System fingerprint")
 
-    # Security and audit information
-    security_check: bool = Field(..., description="Whether security check passed")
-    risk_score: float = Field(..., description="Security risk score")
-    detected_patterns: List[str] = Field(default_factory=list, description="Detected security patterns")
+    # Security fields maintained for backward compatibility
+    security_check: Optional[bool] = Field(None, description="Whether security check passed")
+    risk_score: Optional[float] = Field(None, description="Security risk score")
+    detected_patterns: Optional[List[str]] = Field(None, description="Detected security patterns")
 
     # Performance metrics
     latency_ms: Optional[float] = Field(None, description="Response latency in milliseconds")
@@ -117,9 +117,10 @@ class EmbeddingResponse(BaseModel):
     provider: str = Field(..., description="Provider used")
     usage: Optional[TokenUsage] = Field(None, description="Token usage")
 
-    # Security and audit information
-    security_check: bool = Field(..., description="Whether security check passed")
-    risk_score: float = Field(..., description="Security risk score")
+    # Security fields maintained for backward compatibility
+    security_check: Optional[bool] = Field(None, description="Whether security check passed")
+    risk_score: Optional[float] = Field(None, description="Security risk score")
+    detected_patterns: Optional[List[str]] = Field(None, description="Detected security patterns")
 
     # Performance metrics
     latency_ms: Optional[float] = Field(None, description="Response latency in milliseconds")
@@ -158,7 +159,6 @@ class LLMMetrics(BaseModel):
     successful_requests: int = Field(0, description="Successful requests")
     failed_requests: int = Field(0, description="Failed requests")
     average_latency_ms: float = Field(0.0, description="Average response latency")
-    average_risk_score: float = Field(0.0, description="Average security risk score")
     provider_metrics: Dict[str, Dict[str, Any]] = Field(default_factory=dict, description="Per-provider metrics")
     last_updated: datetime = Field(default_factory=datetime.utcnow, description="Last metrics update")
 
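To make the backward-compatibility intent concrete, here is a stripped-down pydantic illustration (not the real ChatResponse or EmbeddingResponse) of what switching the security fields from required to Optional changes for callers:

# Stripped-down illustration only: not the actual response models from this diff.
from typing import Optional
from pydantic import BaseModel, Field

class Before(BaseModel):
    security_check: bool = Field(..., description="Required: every response had to set it")

class After(BaseModel):
    security_check: Optional[bool] = Field(None, description="Optional: omitted when checks are disabled")

After()                     # valid now that the field defaults to None
After(security_check=True)  # existing callers that still pass the field keep working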
@@ -17,6 +17,7 @@ from .models import (
 )
 from .config import config_manager, ProviderConfig
 from ...core.config import settings
+
 from .resilience import ResilienceManagerFactory
 # from .metrics import metrics_collector
 from .providers import BaseLLMProvider, PrivateModeProvider
@@ -149,7 +150,6 @@ class LLMService:
         if not request.messages:
             raise ValidationError("Messages cannot be empty", field="messages")
 
-        # Security validation disabled - always allow requests
         risk_score = 0.0
 
         # Get provider for model
@@ -159,7 +159,6 @@ class LLMService:
         if not provider:
             raise ProviderError(f"No available provider for model '{request.model}'", provider=provider_name)
 
-        # Security logging disabled
 
         # Execute with resilience
         resilience_manager = ResilienceManagerFactory.get_manager(provider_name)
@@ -170,28 +169,15 @@ class LLMService:
                 provider.create_chat_completion,
                 request,
                 retryable_exceptions=(ProviderError, TimeoutError),
-                non_retryable_exceptions=(SecurityError, ValidationError)
+                non_retryable_exceptions=(ValidationError,)
             )
 
-            # Security features disabled
-
-            # Security logging disabled
-
             # Record successful request - metrics disabled
             total_latency = (time.time() - start_time) * 1000
-            # metrics_collector.record_request(
-            # provider=provider_name,
-            # model=request.model,
-            # request_type="chat_completion",
-            # success=True,
-            # latency_ms=total_latency,
-            # token_usage=response.usage.model_dump() if response.usage else None,
-            # security_risk_score=risk_score,
-            # user_id=request.user_id,
-            # api_key_id=request.api_key_id
-            # )
-
-            # Security audit logging disabled
 
             return response
 
@@ -200,19 +186,6 @@ class LLMService:
             total_latency = (time.time() - start_time) * 1000
             error_code = getattr(e, 'error_code', e.__class__.__name__)
 
-            # metrics_collector.record_request(
-            # provider=provider_name,
-            # model=request.model,
-            # request_type="chat_completion",
-            # success=False,
-            # latency_ms=total_latency,
-            # security_risk_score=risk_score,
-            # error_code=error_code,
-            # user_id=request.user_id,
-            # api_key_id=request.api_key_id
-            # )
-
-            # Security audit logging disabled
-
             raise
 
@@ -224,6 +197,7 @@ class LLMService:
         # Security validation disabled - always allow streaming requests
         risk_score = 0.0
 
+
         # Get provider
         provider_name = self._get_provider_for_model(request.model)
         provider = self._providers.get(provider_name)
@@ -239,24 +213,13 @@ class LLMService:
                 provider.create_chat_completion_stream,
                 request,
                 retryable_exceptions=(ProviderError, TimeoutError),
-                non_retryable_exceptions=(SecurityError, ValidationError)
+                non_retryable_exceptions=(ValidationError,)
             ):
                 yield chunk
 
         except Exception as e:
             # Record streaming failure - metrics disabled
             error_code = getattr(e, 'error_code', e.__class__.__name__)
-            # metrics_collector.record_request(
-            # provider=provider_name,
-            # model=request.model,
-            # request_type="chat_completion_stream",
-            # success=False,
-            # latency_ms=0,
-            # security_risk_score=risk_score,
-            # error_code=error_code,
-            # user_id=request.user_id,
-            # api_key_id=request.api_key_id
-            # )
             raise
 
     async def create_embedding(self, request: EmbeddingRequest) -> EmbeddingResponse:
@@ -267,6 +230,7 @@ class LLMService:
         # Security validation disabled - always allow embedding requests
         risk_score = 0.0
 
+
         # Get provider
         provider_name = self._get_provider_for_model(request.model)
         provider = self._providers.get(provider_name)
@@ -283,24 +247,13 @@ class LLMService:
                 provider.create_embedding,
                 request,
                 retryable_exceptions=(ProviderError, TimeoutError),
-                non_retryable_exceptions=(SecurityError, ValidationError)
+                non_retryable_exceptions=(ValidationError,)
            )
 
-            # Security features disabled
-
             # Record successful request - metrics disabled
             total_latency = (time.time() - start_time) * 1000
-            # metrics_collector.record_request(
-            # provider=provider_name,
-            # model=request.model,
-            # request_type="embedding",
-            # success=True,
-            # latency_ms=total_latency,
-            # token_usage=response.usage.model_dump() if response.usage else None,
-            # security_risk_score=risk_score,
-            # user_id=request.user_id,
-            # api_key_id=request.api_key_id
-            # )
 
             return response
 
@@ -308,19 +261,6 @@ class LLMService:
             # Record failed request - metrics disabled
             total_latency = (time.time() - start_time) * 1000
             error_code = getattr(e, 'error_code', e.__class__.__name__)
 
-            # metrics_collector.record_request(
-            # provider=provider_name,
-            # model=request.model,
-            # request_type="embedding",
-            # success=False,
-            # latency_ms=total_latency,
-            # security_risk_score=risk_score,
-            # error_code=error_code,
-            # user_id=request.user_id,
-            # api_key_id=request.api_key_id
-            # )
-
             raise
 
     async def get_models(self, provider_name: Optional[str] = None) -> List[ModelInfo]:
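The execute calls above hand the resilience layer explicit retryable and non-retryable exception tuples. The sketch below illustrates that contract in isolation; it is not the project's ResilienceManagerFactory, just a minimal stand-in showing why ValidationError is excluded from retries:

# Illustrative retry contract only: not the actual ResilienceManager implementation.
import asyncio

async def execute_with_retries(fn, *args, retryable_exceptions=(), non_retryable_exceptions=(), attempts=3):
    for attempt in range(1, attempts + 1):
        try:
            return await fn(*args)
        except non_retryable_exceptions:
            raise  # e.g. ValidationError: retrying cannot fix a bad request
        except retryable_exceptions:
            if attempt == attempts:
                raise  # retries exhausted, surface the provider/timeout error
            await asyncio.sleep(0.5 * attempt)  # simple linear backoff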
@@ -269,18 +269,35 @@ class ModulePermissionRegistry:
     def get_user_permissions(self, roles: List[str],
                              custom_permissions: List[str] = None) -> List[str]:
         """Get effective permissions for a user based on roles and custom permissions"""
-        permissions = set()
-
-        # Add role-based permissions
-        for role in roles:
-            role_perms = self.role_permissions.get(role, self.default_roles.get(role, []))
-            permissions.update(role_perms)
-
-        # Add custom permissions
-        if custom_permissions:
-            permissions.update(custom_permissions)
-
-        return list(permissions)
+        import time
+        start_time = time.time()
+        logger.info(f"=== GET USER PERMISSIONS START === Roles: {roles}, Custom perms: {custom_permissions}")
+
+        try:
+            permissions = set()
+
+            # Add role-based permissions
+            for role in roles:
+                role_perms = self.role_permissions.get(role, self.default_roles.get(role, []))
+                logger.info(f"Role '{role}' has {len(role_perms)} permissions")
+                permissions.update(role_perms)
+
+            # Add custom permissions
+            if custom_permissions:
+                permissions.update(custom_permissions)
+                logger.info(f"Added {len(custom_permissions)} custom permissions")
+
+            result = list(permissions)
+            end_time = time.time()
+            duration = end_time - start_time
+            logger.info(f"=== GET USER PERMISSIONS END === Total permissions: {len(result)}, Duration: {duration:.3f}s")
+
+            return result
+        except Exception as e:
+            end_time = time.time()
+            duration = end_time - start_time
+            logger.error(f"=== GET USER PERMISSIONS FAILED === Duration: {duration:.3f}s, Error: {e}")
+            raise
 
     def get_module_permissions(self, module_id: str) -> List[Permission]:
         """Get all permissions for a specific module"""
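A short, hypothetical call against the instrumented method; the role and permission strings are invented, and only the union-of-roles-plus-custom-permissions behaviour is taken from the code above:

# Hypothetical example: registry construction, role names and permission strings are assumptions.
registry = ModulePermissionRegistry()
perms = registry.get_user_permissions(
    roles=["admin", "viewer"],
    custom_permissions=["rag:write"],
)
# perms is a de-duplicated list built from both roles plus the custom entry;
# the new logging reports per-role counts and the total duration of the lookup.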
@@ -398,19 +398,26 @@ class PluginInstaller:
         if plugin_id in plugin_loader.loaded_plugins:
             await plugin_loader.unload_plugin(plugin_id)
 
-        # Backup data if requested
+        # Backup data if requested (handle missing files gracefully)
         backup_path = None
         if keep_data:
-            backup_path = await plugin_db_manager.backup_plugin_data(plugin_id)
+            try:
+                backup_path = await plugin_db_manager.backup_plugin_data(plugin_id)
+            except Exception as e:
+                logger.warning(f"Could not backup plugin data (files may be missing): {e}")
+                # Continue with uninstall even if backup fails
 
         # Delete database schema if not keeping data
         if not keep_data:
             await plugin_db_manager.delete_plugin_schema(plugin_id)
 
-        # Remove plugin files
+        # Remove plugin files (handle missing directories gracefully)
         plugin_dir = self.plugins_dir / plugin_id
         if plugin_dir.exists():
             shutil.rmtree(plugin_dir)
+            logger.info(f"Removed plugin directory: {plugin_dir}")
+        else:
+            logger.warning(f"Plugin directory not found (already removed?): {plugin_dir}")
 
         # Update database
         plugin.status = "uninstalled"
162 backend/scripts/cleanup_orphaned_plugin.py Executable file
@@ -0,0 +1,162 @@
#!/usr/bin/env python3
"""
Script to clean up orphaned plugin registrations from the database
when plugin files have been manually removed from the filesystem.

Usage:
    python cleanup_orphaned_plugin.py [plugin_name_or_id]

If no plugin name/id is provided, it will list all orphaned plugins
and prompt for confirmation to clean them up.
"""

import sys
import os
import asyncio
from pathlib import Path
from uuid import UUID

# Add backend directory to path
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from sqlalchemy import select, delete
from sqlalchemy.ext.asyncio import AsyncSession
from app.db.database import async_session_factory, engine
from app.models.plugin import Plugin
from app.core.config import settings
from app.core.logging import get_logger

logger = get_logger("plugin.cleanup")


async def find_orphaned_plugins(session: AsyncSession):
    """Find plugins registered in database but missing from filesystem"""
    plugins_dir = Path(settings.PLUGINS_DIR or "/plugins")

    # Get all plugins from database
    stmt = select(Plugin)
    result = await session.execute(stmt)
    all_plugins = result.scalars().all()

    orphaned = []
    for plugin in all_plugins:
        # Check if plugin directory exists
        plugin_path = plugins_dir / str(plugin.id)
        if not plugin_path.exists():
            orphaned.append(plugin)
            logger.info(f"Found orphaned plugin: {plugin.name} (ID: {plugin.id})")

    return orphaned


async def cleanup_plugin(session: AsyncSession, plugin: Plugin, keep_data: bool = True):
    """Clean up a single orphaned plugin registration"""
    try:
        logger.info(f"Cleaning up plugin: {plugin.name} (ID: {plugin.id})")

        # Delete plugin configurations if they exist
        try:
            from app.models.plugin_configuration import PluginConfiguration
            config_stmt = delete(PluginConfiguration).where(
                PluginConfiguration.plugin_id == plugin.id
            )
            await session.execute(config_stmt)
            logger.info(f"Deleted configurations for plugin {plugin.id}")
        except ImportError:
            pass  # Plugin configuration model might not exist

        # Delete the plugin record
        await session.delete(plugin)
        await session.commit()

        logger.info(f"Successfully cleaned up plugin: {plugin.name}")
        return True

    except Exception as e:
        logger.error(f"Failed to cleanup plugin {plugin.name}: {e}")
        await session.rollback()
        return False


async def main():
    """Main cleanup function"""
    target_plugin = sys.argv[1] if len(sys.argv) > 1 else None

    async with async_session_factory() as session:
        if target_plugin:
            # Clean up specific plugin
            try:
                # Try to parse as UUID first
                plugin_id = UUID(target_plugin)
                stmt = select(Plugin).where(Plugin.id == plugin_id)
            except ValueError:
                # Not a UUID, search by name
                stmt = select(Plugin).where(Plugin.name == target_plugin)

            result = await session.execute(stmt)
            plugin = result.scalar_one_or_none()

            if not plugin:
                print(f"Plugin '{target_plugin}' not found in database")
                return

            # Check if plugin directory exists
            plugins_dir = Path(settings.PLUGINS_DIR or "/plugins")
            plugin_path = plugins_dir / str(plugin.id)

            if plugin_path.exists():
                print(f"Plugin directory exists at {plugin_path}")
                response = input("Plugin files exist. Are you sure you want to cleanup the database entry? (y/N): ")
                if response.lower() != 'y':
                    print("Cleanup cancelled")
                    return

            print(f"\nFound plugin:")
            print(f" Name: {plugin.name}")
            print(f" ID: {plugin.id}")
            print(f" Version: {plugin.version}")
            print(f" Status: {plugin.status}")
            print(f" Directory: {plugin_path} (exists: {plugin_path.exists()})")

            response = input("\nProceed with cleanup? (y/N): ")
            if response.lower() == 'y':
                success = await cleanup_plugin(session, plugin)
                if success:
                    print("✓ Plugin cleaned up successfully")
                else:
                    print("✗ Failed to cleanup plugin")
            else:
                print("Cleanup cancelled")

        else:
            # List all orphaned plugins
            orphaned = await find_orphaned_plugins(session)

            if not orphaned:
                print("No orphaned plugins found")
                return

            print(f"\nFound {len(orphaned)} orphaned plugin(s):")
            for plugin in orphaned:
                plugins_dir = Path(settings.PLUGINS_DIR or "/plugins")
                plugin_path = plugins_dir / str(plugin.id)
                print(f"\n • {plugin.name}")
                print(f" ID: {plugin.id}")
                print(f" Version: {plugin.version}")
                print(f" Status: {plugin.status}")
                print(f" Expected path: {plugin_path}")

            response = input(f"\nCleanup all {len(orphaned)} orphaned plugin(s)? (y/N): ")
            if response.lower() == 'y':
                success_count = 0
                for plugin in orphaned:
                    if await cleanup_plugin(session, plugin):
                        success_count += 1

                print(f"\n✓ Cleaned up {success_count}/{len(orphaned)} plugin(s)")
            else:
                print("Cleanup cancelled")


if __name__ == "__main__":
    asyncio.run(main())
61 backend/test_llm_no_security.py Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env python3
"""
Test script to verify LLM service works without security validation
"""
import asyncio
import sys
import os

# Add the app directory to Python path
sys.path.insert(0, '/app')

from app.services.llm.service import llm_service
from app.services.llm.models import ChatRequest, ChatMessage

async def test_llm_without_security():
    """Test LLM service without security validation"""
    print("Testing LLM service without security validation...")

    try:
        # Initialize the LLM service
        await llm_service.initialize()
        print("✅ LLM service initialized successfully")

        # Create a test request with privatemode model
        request = ChatRequest(
            model="privatemode-llama-3-70b",  # Use actual privatemode model
            messages=[
                ChatMessage(role="user", content="Hello, this is a test message with SQL keywords: SELECT * FROM users;")
            ],
            temperature=0.7,
            max_tokens=100,
            user_id="test_user",
            api_key_id=1
        )

        print(f"📝 Created test request with message: {request.messages[0].content}")

        # Try to create chat completion
        # This should work now without security blocking
        response = await llm_service.create_chat_completion(request)

        print("✅ Chat completion successful!")
        print(f" Response ID: {response.id}")
        print(f" Model: {response.model}")
        print(f" Provider: {response.provider}")
        print(f" Security check: {response.security_check}")
        print(f" Risk score: {response.risk_score}")
        print(f" Content: {response.choices[0].message.content[:100]}...")

        return True

    except Exception as e:
        print(f"❌ Error: {e}")
        return False
    finally:
        # Cleanup
        await llm_service.cleanup()

if __name__ == "__main__":
    success = asyncio.run(test_llm_without_security())
    sys.exit(0 if success else 1)
180 docker-compose.prod.yml Normal file
@@ -0,0 +1,180 @@
services:
  # Nginx reverse proxy - Internal routing only (since SSL is handled by host)
  enclava-nginx:
    image: nginx:alpine
    ports:
      - "50080:80"  # Port for host reverse proxy to connect to
    volumes:
      - ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
      - nginx-logs:/var/log/nginx
    depends_on:
      - enclava-backend
      - enclava-frontend
    networks:
      - enclava-net
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Database migration service - runs once to apply migrations
  enclava-migrate:
    build:
      context: ./backend
      dockerfile: Dockerfile.prod
    env_file:
      - ./.env
    environment:
      - DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@enclava-postgres:5432/${POSTGRES_DB}
    depends_on:
      - enclava-postgres
    command: ["/usr/local/bin/migrate.sh"]
    networks:
      - enclava-net
    restart: "no"  # Run once and exit

  # Main application backend - Production version
  enclava-backend:
    build:
      context: ./backend
      dockerfile: Dockerfile.prod
    env_file:
      - ./.env
    environment:
      - DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@enclava-postgres:5432/${POSTGRES_DB}
      - REDIS_URL=redis://enclava-redis:6379
      - QDRANT_HOST=enclava-qdrant
      - JWT_SECRET=${JWT_SECRET}
      - PRIVATEMODE_API_KEY=${PRIVATEMODE_API_KEY}
      - ADMIN_EMAIL=${ADMIN_EMAIL}
      - ADMIN_PASSWORD=${ADMIN_PASSWORD}
      - LOG_LLM_PROMPTS=${LOG_LLM_PROMPTS:-false}
      - BASE_URL=${BASE_URL}
      - NODE_ENV=production
      - APP_ENV=production
    depends_on:
      - enclava-migrate
      - enclava-postgres
      - enclava-redis
      - enclava-qdrant
      - privatemode-proxy
    volumes:
      - ./logs:/app/logs
      - ./plugins:/plugins
    networks:
      - enclava-net
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Next.js frontend - Production build
  enclava-frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
      target: runner  # Use the production stage from multi-stage build
    env_file:
      - ./.env
    environment:
      - BASE_URL=${BASE_URL}
      - NEXT_PUBLIC_BASE_URL=${BASE_URL}
      - INTERNAL_API_URL=http://enclava-backend:8000
      - NODE_ENV=production
      - NEXT_TELEMETRY_DISABLED=1
    depends_on:
      - enclava-backend
    networks:
      - enclava-net
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000"]
      interval: 30s
      timeout: 10s
      retries: 3

  # PostgreSQL database
  enclava-postgres:
    image: postgres:16-alpine
    environment:
      - POSTGRES_DB=${POSTGRES_DB}
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
    volumes:
      - enclava-postgres-data:/var/lib/postgresql/data
      - ./postgres/postgresql.conf:/etc/postgresql/postgresql.conf:ro
    networks:
      - enclava-net
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
      interval: 30s
      timeout: 10s
      retries: 5

  # Redis for caching and message queue
  enclava-redis:
    image: redis:7-alpine
    command: redis-server --appendonly yes --maxmemory 512mb --maxmemory-policy allkeys-lru
    volumes:
      - enclava-redis-data:/data
    networks:
      - enclava-net
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Qdrant vector database
  enclava-qdrant:
    image: qdrant/qdrant:v1.7.4
    environment:
      - QDRANT__SERVICE__HTTP_PORT=6333
      - QDRANT__SERVICE__GRPC_PORT=6334
      - QDRANT__LOG_LEVEL=INFO
    volumes:
      - enclava-qdrant-data:/qdrant/storage
    networks:
      - enclava-net
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:6333/"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Privatemode.ai service (optional - for confidential models)
  privatemode-proxy:
    image: ghcr.io/edgelesssys/privatemode/privatemode-proxy:latest
    environment:
      - PRIVATEMODE_API_KEY=${PRIVATEMODE_API_KEY}
      - PRIVATEMODE_CACHE_MODE=${PRIVATEMODE_CACHE_MODE:-none}
      - PRIVATEMODE_CACHE_SALT=${PRIVATEMODE_CACHE_SALT:-}
    entrypoint: ["/bin/privatemode-proxy"]
    command: [
      "--apiKey=${PRIVATEMODE_API_KEY}",
      "--port=8080"
    ]
    networks:
      - enclava-net
    restart: unless-stopped

volumes:
  enclava-postgres-data:
    driver: local
  enclava-redis-data:
    driver: local
  enclava-qdrant-data:
    driver: local
  nginx-logs:
    driver: local

networks:
  enclava-net:
    driver: bridge
@@ -70,11 +70,8 @@ services:
       # Required base URL (derives APP/API/WS URLs)
       - BASE_URL=${BASE_URL}
       - NEXT_PUBLIC_BASE_URL=${BASE_URL}
-      # Docker internal ports
-      - BACKEND_INTERNAL_PORT=${BACKEND_INTERNAL_PORT}
-      - FRONTEND_INTERNAL_PORT=${FRONTEND_INTERNAL_PORT}
       # Internal API URL
-      - INTERNAL_API_URL=http://enclava-backend:${BACKEND_INTERNAL_PORT}
+      - INTERNAL_API_URL=http://enclava-backend:8000
     depends_on:
       - enclava-backend
     ports:
@@ -1,5 +1,5 @@
 # Use the official Node.js runtime as the base image
-FROM node:18-alpine AS base
+FROM node:22-alpine AS base
 
 # Install dependencies only when needed
 FROM base AS deps
@@ -7,14 +7,10 @@ FROM base AS deps
 RUN apk add --no-cache libc6-compat
 WORKDIR /app
 
-# Install dependencies based on the preferred package manager
-COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./
-RUN \
-  if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
-  elif [ -f package-lock.json ]; then npm ci; \
-  elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i --frozen-lockfile; \
-  else echo "Lockfile not found." && exit 1; \
-  fi
+# Install dependencies with npm only
+COPY package.json package-lock.json ./
+RUN npm install
+RUN npm ci
 
 # Rebuild the source code only when needed
 FROM base AS builder
@@ -23,23 +19,20 @@ COPY --from=deps /app/node_modules ./node_modules
 COPY . .
 
 # Environment variables for build
-ENV NEXT_TELEMETRY_DISABLED 1
-ENV NODE_ENV production
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV NODE_ENV=production
 
-# Build the application
-RUN \
-  if [ -f yarn.lock ]; then yarn build; \
-  elif [ -f package-lock.json ]; then npm run build; \
-  elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm build; \
-  else echo "Lockfile not found." && exit 1; \
-  fi
+# Ensure all dependencies are installed for build
+RUN npm install
+RUN npm install --save-dev autoprefixer
+RUN npm run build
 
 # Production image, copy all the files and run next
 FROM base AS runner
 WORKDIR /app
 
-ENV NODE_ENV production
-ENV NEXT_TELEMETRY_DISABLED 1
+ENV NODE_ENV=production
+ENV NEXT_TELEMETRY_DISABLED=1
 
 # Create nextjs user
 RUN addgroup --system --gid 1001 nodejs
@@ -48,10 +41,10 @@ RUN adduser --system --uid 1001 nextjs
 # Copy node_modules from deps stage
 COPY --from=deps /app/node_modules ./node_modules
 
-# Copy built application
+# Copy built application (standalone output)
 COPY --from=builder /app/public ./public
-COPY --from=builder /app/.next ./.next
-COPY --from=builder /app/package.json ./package.json
+COPY --from=builder /app/.next/standalone ./
+COPY --from=builder /app/.next/static ./.next/static
 
 # Set the correct permission for prerender cache
 RUN chown -R nextjs:nodejs .next
@@ -61,8 +54,8 @@ USER nextjs
 # Expose port
 EXPOSE 3000
 
-ENV PORT 3000
-ENV HOSTNAME "0.0.0.0"
+ENV PORT=3000
+ENV HOSTNAME=0.0.0.0
 
-# Start the application
-CMD ["npm", "start"]
+# Start the application (standalone)
+CMD ["node", "server.js"]
@@ -1,3 +1,10 @@
+const path = require('path');
+let TsconfigPathsPlugin;
+try {
+  // Optional: only used if installed
+  TsconfigPathsPlugin = require('tsconfig-paths-webpack-plugin');
+} catch (_) {}
+
 /** @type {import('next').NextConfig} */
 const nextConfig = {
   reactStrictMode: true,
@@ -9,7 +16,40 @@ const nextConfig = {
   typescript: {
     ignoreBuildErrors: true,
   },
-  experimental: {
+  // Enable standalone output for better Docker compatibility
+  output: 'standalone',
+  webpack: (config, { dev }) => {
+    // Ensure resolve object exists
+    config.resolve = config.resolve || {};
+    config.resolve.alias = config.resolve.alias || {};
+
+    // Hard-set robust alias for "@" => <repo>/src
+    config.resolve.alias['@'] = path.resolve(__dirname, 'src');
+
+    // Ensure common extensions are resolvable
+    const exts = config.resolve.extensions || [];
+    config.resolve.extensions = Array.from(new Set([...exts, '.ts', '.tsx', '.js', '.jsx']));
+
+    // Add tsconfig-aware resolver plugin if available
+    if (TsconfigPathsPlugin) {
+      const existing = config.resolve.plugins || [];
+      existing.push(
+        new TsconfigPathsPlugin({
+          configFile: path.resolve(__dirname, 'tsconfig.json'),
+          extensions: config.resolve.extensions,
+          mainFields: ['browser', 'module', 'main'],
+        })
+      );
+      config.resolve.plugins = existing;
+    }
+
+    // Optional: Add debug logging in development
+    if (dev) {
+      // eslint-disable-next-line no-console
+      console.log('Webpack alias config:', config.resolve.alias);
+    }
+
+    return config;
   },
   env: {
     NEXT_PUBLIC_BASE_URL: process.env.NEXT_PUBLIC_BASE_URL,
1590 frontend/package-lock.json (generated; diff suppressed because it is too large)
@@ -9,6 +9,7 @@
     "lint": "next lint"
   },
   "dependencies": {
+    "tsconfig-paths-webpack-plugin": "^4.1.0",
     "@hookform/resolvers": "^3.3.2",
     "@radix-ui/react-alert-dialog": "^1.1.14",
     "@radix-ui/react-avatar": "^1.0.4",
@@ -30,6 +31,7 @@
     "@radix-ui/react-toast": "^1.1.5",
     "@radix-ui/react-tooltip": "^1.0.7",
     "@tailwindcss/typography": "^0.5.16",
+    "autoprefixer": "^10.4.16",
     "axios": "^1.6.2",
     "class-variance-authority": "^0.7.0",
     "clsx": "^2.0.0",
@@ -1,6 +1,5 @@
 module.exports = {
   plugins: {
     tailwindcss: {},
-    autoprefixer: {},
   },
 }
@@ -1,6 +1,7 @@
 "use client";
 
-import { useState, useEffect } from "react";
+import { useState, useEffect, Suspense } from "react";
+export const dynamic = 'force-dynamic'
 import { useSearchParams } from "next/navigation";
 import { Suspense } from "react";
 import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
@@ -95,6 +96,7 @@ const PERMISSION_OPTIONS = [
 ];
 
 function ApiKeysContent() {
+
   const { toast } = useToast();
   const searchParams = useSearchParams();
   const [apiKeys, setApiKeys] = useState<ApiKey[]>([]);
@@ -915,3 +917,4 @@ export default function ApiKeysPage() {
     </Suspense>
   );
 }
+
@@ -1,28 +1,11 @@
 import { NextRequest, NextResponse } from 'next/server'
-import { proxyRequest, handleProxyResponse } from '@/lib/proxy-auth'
+import { apiClient } from '@/lib/api-client'
 
 export async function GET() {
   try {
-    // Direct fetch instead of proxyRequest (proxyRequest had caching issues)
-    const baseUrl = process.env.INTERNAL_API_URL || `http://enclava-backend:${process.env.BACKEND_INTERNAL_PORT || '8000'}`
-    const url = `${baseUrl}/api/modules/`
-    const adminToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxIiwiZW1haWwiOiJhZG1pbkBleGFtcGxlLmNvbSIsImlzX3N1cGVydXNlciI6dHJ1ZSwicm9sZSI6InN1cGVyX2FkbWluIiwiZXhwIjoxNzg0Nzk2NDI2LjA0NDYxOX0.YOTlUY8nowkaLAXy5EKfnZEpbDgGCabru5R0jdq_DOQ'
-
-    const response = await fetch(url, {
-      method: 'GET',
-      headers: {
-        'Authorization': `Bearer ${adminToken}`,
-        'Content-Type': 'application/json'
-      },
-      // Disable caching to ensure fresh data
-      cache: 'no-store'
-    })
-
-    if (!response.ok) {
-      throw new Error(`Backend responded with ${response.status}: ${response.statusText}`)
-    }
-
-    const data = await response.json()
+    // Use the authenticated API client which handles JWT tokens automatically
+    const data = await apiClient.get('/modules/')
 
     return NextResponse.json(data)
   } catch (error) {
     return NextResponse.json(
@@ -1,6 +1,6 @@
 "use client"
 
-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"
 import { useState, useEffect } from "react"
 import { ProtectedRoute } from "@/components/auth/ProtectedRoute"
 import { useToast } from "@/hooks/use-toast"
379 frontend/src/app/debug/page.tsx Normal file
@@ -0,0 +1,379 @@
"use client"

import { useState, useEffect } from "react"
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"
import { Button } from "@/components/ui/button"
import { Input } from "@/components/ui/input"
import { Badge } from "@/components/ui/badge"
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"
import { ProtectedRoute } from "@/components/auth/ProtectedRoute"
import { apiClient } from "@/lib/api-client"
import { Bug, Database, Search, CheckCircle, XCircle, AlertCircle } from "lucide-react"

interface SystemStatus {
  database: string
  modules: Record<string, any>
  redis: string
  qdrant: string
  timestamp: string
}

interface ChatbotConfig {
  chatbot: {
    id: string
    name: string
    type: string
    description: string
    created_at: string
    is_active: boolean
    conversation_count: number
  }
  prompt_template: {
    type: string | null
    system_prompt: string | null
    variables: any[]
  }
  rag_collections: any[]
  configuration: {
    max_tokens: number
    temperature: number
    streaming: boolean
    memory_config: any
  }
}

interface RagTestResult {
  query: string
  results: any[]
  collections_searched: string[]
  result_count: number
  error?: string
  message?: string
}

export default function DebugPage() {
  const [systemStatus, setSystemStatus] = useState<SystemStatus | null>(null)
  const [chatbots, setChatbots] = useState<any[]>([])
  const [selectedChatbot, setSelectedChatbot] = useState<string>("")
  const [chatbotConfig, setChatbotConfig] = useState<ChatbotConfig | null>(null)
  const [ragQuery, setRagQuery] = useState("What is security?")
  const [ragTest, setRagTest] = useState<RagTestResult | null>(null)
  const [loading, setLoading] = useState(false)

  useEffect(() => {
    loadSystemStatus()
    loadChatbots()
  }, [])

  const loadSystemStatus = async () => {
    try {
      const response = await apiClient.get("/api-internal/v1/debugging/system/status")
      setSystemStatus(response)
    } catch (error) {
      console.error("Failed to load system status:", error)
    }
  }

  const loadChatbots = async () => {
    try {
      const response = await apiClient.get("/api-internal/v1/chatbot/list")
      setChatbots(response)
      if (response.length > 0) {
        setSelectedChatbot(response[0].id)
      }
    } catch (error) {
      console.error("Failed to load chatbots:", error)
    }
  }

  const loadChatbotConfig = async (chatbotId: string) => {
    setLoading(true)
    try {
      const response = await apiClient.get(`/api-internal/v1/debugging/chatbot/${chatbotId}/config`)
      setChatbotConfig(response)
    } catch (error) {
      console.error("Failed to load chatbot config:", error)
    } finally {
      setLoading(false)
    }
  }

  const testRagSearch = async () => {
    if (!selectedChatbot) return

    setLoading(true)
    try {
      const response = await apiClient.get(
        `/api-internal/v1/debugging/chatbot/${selectedChatbot}/test-rag`,
        { params: { query: ragQuery } }
      )
      setRagTest(response)
    } catch (error) {
      console.error("Failed to test RAG search:", error)
    } finally {
      setLoading(false)
    }
  }

  const getStatusIcon = (status: string) => {
    if (status.includes("healthy")) return <CheckCircle className="h-4 w-4 text-green-500" />
    if (status.includes("error")) return <XCircle className="h-4 w-4 text-red-500" />
    return <AlertCircle className="h-4 w-4 text-yellow-500" />
  }

  return (
    <ProtectedRoute>
      <div className="container mx-auto px-4 py-8">
        <div className="mb-8">
          <h1 className="text-3xl font-bold mb-2">Debugging Dashboard</h1>
          <p className="text-muted-foreground">
            Troubleshoot and diagnose chatbot issues
          </p>
        </div>

        <Tabs defaultValue="system" className="space-y-6">
          <TabsList>
            <TabsTrigger value="system">System Status</TabsTrigger>
            <TabsTrigger value="chatbot">Chatbot Debug</TabsTrigger>
            <TabsTrigger value="rag">RAG Testing</TabsTrigger>
          </TabsList>

          <TabsContent value="system" className="space-y-6">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Database className="h-5 w-5" />
                  System Health Status
                </CardTitle>
              </CardHeader>
              <CardContent className="space-y-4">
                {systemStatus ? (
                  <div className="grid gap-4">
                    <div className="flex items-center justify-between p-4 border rounded">
                      <span className="font-medium">Database</span>
                      <div className="flex items-center gap-2">
                        {getStatusIcon(systemStatus.database)}
                        <span className="text-sm">{systemStatus.database}</span>
                      </div>
                    </div>
                    <div className="flex items-center justify-between p-4 border rounded">
                      <span className="font-medium">Redis</span>
                      <div className="flex items-center gap-2">
                        {getStatusIcon(systemStatus.redis)}
                        <span className="text-sm">{systemStatus.redis}</span>
                      </div>
                    </div>
                    <div className="flex items-center justify-between p-4 border rounded">
                      <span className="font-medium">Qdrant</span>
                      <div className="flex items-center gap-2">
                        {getStatusIcon(systemStatus.qdrant)}
                        <span className="text-sm">{systemStatus.qdrant}</span>
                      </div>
                    </div>
                    <div className="mt-6">
                      <h4 className="font-medium mb-3">Modules Status</h4>
                      <div className="grid gap-2">
                        {Object.entries(systemStatus.modules).map(([name, info]: [string, any]) => (
                          <div key={name} className="flex items-center justify-between p-3 border rounded">
                            <span className="text-sm font-medium capitalize">{name}</span>
                            <div className="flex items-center gap-2">
                              <Badge variant={info.enabled ? "default" : "secondary"}>
                                {info.enabled ? "Enabled" : "Disabled"}
                              </Badge>
                              <Badge variant={info.status === "healthy" ? "default" : "destructive"}>
                                {info.status}
                              </Badge>
                            </div>
                          </div>
                        ))}
                      </div>
                    </div>
                  </div>
                ) : (
                  <p>Loading system status...</p>
                )}
              </CardContent>
            </Card>
          </TabsContent>

          <TabsContent value="chatbot" className="space-y-6">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Bug className="h-5 w-5" />
                  Chatbot Configuration
                </CardTitle>
              </CardHeader>
              <CardContent className="space-y-4">
                <div className="flex gap-4">
                  <div className="flex-1">
                    <label className="text-sm font-medium">Select Chatbot</label>
                    <select
                      value={selectedChatbot}
                      onChange={(e) => {
                        setSelectedChatbot(e.target.value)
                        if (e.target.value) {
                          loadChatbotConfig(e.target.value)
                        }
                      }}
                      className="w-full mt-1 p-2 border rounded"
                    >
                      {chatbots.map((bot) => (
                        <option key={bot.id} value={bot.id}>
                          {bot.name}
                        </option>
                      ))}
                    </select>
                  </div>
                  <Button
                    onClick={() => selectedChatbot && loadChatbotConfig(selectedChatbot)}
                    disabled={loading || !selectedChatbot}
                  >
                    Load Config
                  </Button>
                </div>

                {chatbotConfig && (
                  <div className="space-y-6 mt-6">
                    <div>
                      <h4 className="font-medium mb-2">Chatbot Info</h4>
                      <div className="p-4 border rounded space-y-2 text-sm">
                        <div><strong>Name:</strong> {chatbotConfig.chatbot.name}</div>
                        <div><strong>Type:</strong> {chatbotConfig.chatbot.type}</div>
                        <div><strong>Description:</strong> {chatbotConfig.chatbot.description}</div>
                        <div><strong>Active:</strong> {chatbotConfig.chatbot.is_active ? "Yes" : "No"}</div>
                        <div><strong>Conversations:</strong> {chatbotConfig.chatbot.conversation_count}</div>
                      </div>
                    </div>

                    <div>
                      <h4 className="font-medium mb-2">Configuration</h4>
                      <div className="p-4 border rounded space-y-2 text-sm">
                        <div><strong>Max Tokens:</strong> {chatbotConfig.configuration.max_tokens}</div>
                        <div><strong>Temperature:</strong> {chatbotConfig.configuration.temperature}</div>
                        <div><strong>Streaming:</strong> {chatbotConfig.configuration.streaming ? "Yes" : "No"}</div>
                      </div>
                    </div>

                    <div>
                      <h4 className="font-medium mb-2">Prompt Template</h4>
                      <div className="p-4 border rounded">
                        <div className="text-sm mb-2">
                          <strong>Type:</strong> {chatbotConfig.prompt_template.type || "None"}
                        </div>
                        {chatbotConfig.prompt_template.system_prompt && (
                          <div className="mt-3">
                            <div className="text-sm font-medium mb-1">System Prompt:</div>
                            <pre className="text-xs bg-muted p-3 rounded overflow-auto max-h-40">
                              {chatbotConfig.prompt_template.system_prompt}
                            </pre>
                          </div>
                        )}
                      </div>
                    </div>

                    {chatbotConfig.rag_collections.length > 0 && (
                      <div>
                        <h4 className="font-medium mb-2">RAG Collections</h4>
                        <div className="space-y-2">
                          {chatbotConfig.rag_collections.map((collection) => (
                            <div key={collection.id} className="p-3 border rounded text-sm">
                              <div><strong>Name:</strong> {collection.name}</div>
                              <div><strong>Documents:</strong> {collection.document_count}</div>
                              <div><strong>Qdrant Collection:</strong> {collection.qdrant_collection_name}</div>
                            </div>
                          ))}
                        </div>
                      </div>
                    )}
                  </div>
                )}
              </CardContent>
            </Card>
          </TabsContent>

          <TabsContent value="rag" className="space-y-6">
            <Card>
              <CardHeader>
                <CardTitle className="flex items-center gap-2">
                  <Search className="h-5 w-5" />
                  RAG Search Test
                </CardTitle>
              </CardHeader>
              <CardContent className="space-y-4">
                <div className="flex gap-4">
                  <div className="flex-1">
                    <label className="text-sm font-medium">Test Query</label>
                    <Input
                      value={ragQuery}
                      onChange={(e) => setRagQuery(e.target.value)}
                      placeholder="Enter a test query..."
                      className="mt-1"
                    />
                  </div>
                  <Button onClick={testRagSearch} disabled={loading || !selectedChatbot}>
                    Test Search
                  </Button>
                </div>

                {ragTest && (
                  <div className="mt-6 space-y-4">
                    <div className="p-4 border rounded">
                      <h4 className="font-medium mb-2">Test Results</h4>
                      <div className="text-sm space-y-1">
                        <div><strong>Query:</strong> {ragTest.query}</div>
                        <div><strong>Results Found:</strong> {ragTest.result_count}</div>
                        <div><strong>Collections Searched:</strong> {ragTest.collections_searched.join(", ")}</div>
                        {ragTest.message && (
|
||||||
|
<div><strong>Message:</strong> {ragTest.message}</div>
|
||||||
|
)}
|
||||||
|
{ragTest.error && (
|
||||||
|
<div className="text-red-500"><strong>Error:</strong> {ragTest.error}</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{ragTest.results.length > 0 && (
|
||||||
|
<div>
|
||||||
|
<h4 className="font-medium mb-2">Search Results</h4>
|
||||||
|
<div className="space-y-3 max-h-96 overflow-y-auto">
|
||||||
|
{ragTest.results.map((result, index) => (
|
||||||
|
<div key={index} className="p-3 border rounded text-sm">
|
||||||
|
<div className="flex justify-between items-start mb-2">
|
||||||
|
<Badge variant="outline">Score: {result.score?.toFixed(3) || "N/A"}</Badge>
|
||||||
|
{result.collection_name && (
|
||||||
|
<Badge variant="secondary">{result.collection_name}</Badge>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<div className="text-xs text-muted-foreground mb-1">
|
||||||
|
{result.metadata?.source || "Unknown source"}
|
||||||
|
</div>
|
||||||
|
<div className="text-sm">
|
||||||
|
{result.content?.substring(0, 200)}
|
||||||
|
{result.content?.length > 200 && "..."}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</TabsContent>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
|
<div className="mt-8 p-4 border rounded">
|
||||||
|
<h3 className="font-medium mb-2">How to Use This Dashboard</h3>
|
||||||
|
<ul className="text-sm text-muted-foreground space-y-1">
|
||||||
|
<li>• <strong>System Status:</strong> Check if all services (Database, Redis, Qdrant) are healthy</li>
|
||||||
|
<li>• <strong>Chatbot Debug:</strong> View detailed configuration for any chatbot</li>
|
||||||
|
<li>• <strong>RAG Testing:</strong> Test if document search is working correctly</li>
|
||||||
|
<li>• Check browser console logs for detailed request/response debugging information</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</ProtectedRoute>
|
||||||
|
)
|
||||||
|
}
|
||||||
@@ -4,9 +4,10 @@ import './globals.css'
import { ThemeProvider } from '@/components/providers/theme-provider'
import { Toaster } from '@/components/ui/toaster'
import { Toaster as HotToaster } from 'react-hot-toast'
-import { AuthProvider } from '@/contexts/AuthContext'
+import { AuthProvider } from '@/components/providers/auth-provider'
import { ModulesProvider } from '@/contexts/ModulesContext'
import { PluginProvider } from '@/contexts/PluginContext'
+import { ToastProvider } from '@/contexts/ToastContext'
import { Navigation } from '@/components/ui/navigation'

const inter = Inter({ subsets: ['latin'] })

@@ -16,8 +17,21 @@ export const viewport: Viewport = {
initialScale: 1,
}

+// Function to determine the base URL with proper protocol
+const getBaseUrl = () => {
+// In production, we need to detect if we're behind HTTPS
+if (typeof window !== 'undefined') {
+const protocol = window.location.protocol === 'https:' ? 'https' : 'http'
+const host = process.env.NEXT_PUBLIC_BASE_URL || window.location.hostname
+return `${protocol}://${host}`
+}
+// For build time/server side, default to HTTP for dev, HTTPS for production
+const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http'
+return `${protocol}://${process.env.NEXT_PUBLIC_BASE_URL || 'localhost'}`
+}

export const metadata: Metadata = {
-metadataBase: new URL(`http://${process.env.NEXT_PUBLIC_BASE_URL || 'localhost'}`),
+metadataBase: new URL(getBaseUrl()),
title: 'Enclava Platform',
description: 'Secure AI processing platform with plugin-based architecture and confidential computing',
keywords: ['AI', 'Enclava', 'Confidential Computing', 'LLM', 'TEE'],

@@ -26,7 +40,7 @@ export const metadata: Metadata = {
openGraph: {
type: 'website',
locale: 'en_US',
-url: `http://${process.env.NEXT_PUBLIC_BASE_URL || 'localhost'}`,
+url: getBaseUrl(),
title: 'Enclava Platform',
description: 'Secure AI processing platform with plugin-based architecture and confidential computing',
siteName: 'Enclava',

@@ -55,13 +69,15 @@ export default function RootLayout({
<AuthProvider>
<ModulesProvider>
<PluginProvider>
-<div className="min-h-screen bg-background">
-<Navigation />
-<main className="container mx-auto px-4 py-8">
-{children}
-</main>
-</div>
-<Toaster />
+<ToastProvider>
+<div className="min-h-screen bg-background">
+<Navigation />
+<main className="container mx-auto px-4 py-8">
+{children}
+</main>
+</div>
+<Toaster />
+</ToastProvider>
<HotToaster />
</PluginProvider>
</ModulesProvider>
@@ -18,7 +18,6 @@ import {
Plus,
Settings,
Trash2,
-Copy,
Calendar,
Lock,
Unlock,

@@ -187,15 +186,6 @@ function LLMPageContent() {
}
}

-const copyToClipboard = (text: string, type: string = "API key") => {
-navigator.clipboard.writeText(text)
-toast({
-title: "Copied!",
-description: `${type} copied to clipboard`
-})
-}

const formatCurrency = (cents: number) => {
return `$${(cents / 100).toFixed(4)}`
}

@@ -205,21 +195,6 @@ function LLMPageContent() {
return new Date(dateStr).toLocaleDateString()
}

-// Get the public API URL from the current window location
-const getPublicApiUrl = () => {
-if (typeof window !== 'undefined') {
-const protocol = window.location.protocol
-const hostname = window.location.hostname
-const port = window.location.port || (protocol === 'https:' ? '443' : '80')
-const portSuffix = (protocol === 'https:' && port === '443') || (protocol === 'http:' && port === '80') ? '' : `:${port}`
-return `${protocol}//${hostname}${portSuffix}/api/v1`
-}
-return 'http://localhost/api/v1'
-}
-const publicApiUrl = getPublicApiUrl()

return (
<div className="container mx-auto px-4 py-8">
<div className="mb-8">

@@ -229,77 +204,6 @@ function LLMPageContent() {
</p>
</div>

-{/* Public API URL Display */}
-<Card className="mb-6 border-blue-200 bg-blue-50">
-<CardHeader>
-<CardTitle className="flex items-center gap-2 text-blue-700">
-<Settings className="h-5 w-5" />
-OpenAI-Compatible API Configuration
-</CardTitle>
-<CardDescription className="text-blue-600">
-Use this endpoint URL to configure external tools like Open WebUI, Continue.dev, or any OpenAI-compatible client.
-</CardDescription>
-</CardHeader>
-<CardContent>
-<div className="space-y-4">
-<div>
-<Label className="text-sm font-medium text-blue-700">API Base URL</Label>
-<div className="mt-1 flex items-center gap-2">
-<code className="flex-1 p-3 bg-white border border-blue-200 rounded-md text-sm font-mono">
-{publicApiUrl}
-</code>
-<Button
-onClick={() => copyToClipboard(publicApiUrl, "API URL")}
-variant="outline"
-size="sm"
-className="flex items-center gap-1 border-blue-300 text-blue-700 hover:bg-blue-100"
->
-<Copy className="h-4 w-4" />
-Copy
-</Button>
-</div>
-</div>
-<div className="grid grid-cols-1 md:grid-cols-2 gap-4 text-sm">
-<div className="space-y-2">
-<h4 className="font-medium text-blue-700">Available Endpoints:</h4>
-<ul className="space-y-1 text-blue-600">
-<li>• <code>GET /v1/models</code> - List available models</li>
-<li>• <code>POST /v1/chat/completions</code> - Chat completions</li>
-<li>• <code>POST /v1/embeddings</code> - Text embeddings</li>
-</ul>
-</div>
-<div className="space-y-2">
-<h4 className="font-medium text-blue-700">Configuration Example:</h4>
-<div className="bg-white border border-blue-200 rounded p-2 text-xs font-mono">
-<div>Base URL: {publicApiUrl}</div>
-<div>API Key: ce_your_api_key</div>
-<div>Model: gpt-3.5-turbo</div>
-</div>
-</div>
-</div>
-<div className="bg-blue-100 border border-blue-200 rounded-lg p-3">
-<div className="flex items-start gap-2">
-<AlertTriangle className="h-4 w-4 text-blue-600 mt-0.5 flex-shrink-0" />
-<div className="text-sm text-blue-700">
-<span className="font-medium">Setup Instructions:</span>
-<br />
-1. Copy the API Base URL above
-<br />
-2. Create an API key in the "API Keys" tab below
-<br />
-3. Use both in your OpenAI-compatible client configuration
-<br />
-4. Do NOT append additional paths like "/models" - clients handle this automatically
-</div>
-</div>
-</div>
-</div>
-</CardContent>
-</Card>

<Tabs value={activeTab} onValueChange={setActiveTab}>
<TabsList className="grid w-full grid-cols-2">
<TabsTrigger value="api-keys">API Keys</TabsTrigger>
@@ -2,7 +2,7 @@

import { useState } from "react"
import { useRouter } from "next/navigation"
-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"

// Force dynamic rendering for authentication
export const dynamic = 'force-dynamic'

@@ -1,6 +1,6 @@
"use client"

-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"
import { useRouter } from "next/navigation"
import { useEffect } from "react"

@@ -31,7 +31,7 @@ import { Edit3, RotateCcw, Loader2, Save, AlertTriangle, Plus, Sparkles } from '
import toast from 'react-hot-toast'
import { apiClient } from '@/lib/api-client'
import { config } from '@/lib/config'
-import { useAuth } from '@/contexts/AuthContext'
+import { useAuth } from '@/components/providers/auth-provider'

interface PromptTemplate {
id: string

@@ -11,7 +11,7 @@ import { Plus, Database, Upload, Search, Trash2, FileText, AlertCircle } from "l
import { CollectionManager } from "@/components/rag/collection-manager"
import { DocumentUpload } from "@/components/rag/document-upload"
import { DocumentBrowser } from "@/components/rag/document-browser"
-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"
import { ProtectedRoute } from '@/components/auth/ProtectedRoute'
import { apiClient } from '@/lib/api-client'

@@ -15,7 +15,6 @@ import {
Settings,
Save,
RefreshCw,
-Shield,
Globe,
Database,
Mail,
@@ -36,30 +35,9 @@ import { useModules, triggerModuleRefresh } from '@/contexts/ModulesContext';
import { Badge } from '@/components/ui/badge';

interface SystemSettings {
-// Security Settings
-security: {
-password_min_length: number;
-password_require_uppercase: boolean;
-password_require_lowercase: boolean;
-password_require_numbers: boolean;
-password_require_symbols: boolean;
-session_timeout_minutes: number;
-max_login_attempts: number;
-lockout_duration_minutes: number;
-require_2fa: boolean;
-allowed_domains: string[];
-};

// API Settings
api: {
-// Security Settings
-security_enabled: boolean;
-threat_detection_enabled: boolean;
-rate_limiting_enabled: boolean;
-ip_reputation_enabled: boolean;
-anomaly_detection_enabled: boolean;
-security_headers_enabled: boolean;

// Rate Limiting by Authentication Level
rate_limit_authenticated_per_minute: number;
rate_limit_authenticated_per_hour: number;

@@ -68,22 +46,12 @@ interface SystemSettings {
rate_limit_premium_per_minute: number;
rate_limit_premium_per_hour: number;

-// Security Thresholds
-security_risk_threshold: number;
-security_warning_threshold: number;
-anomaly_threshold: number;

// Request Settings
max_request_size_mb: number;
max_request_size_premium_mb: number;
enable_cors: boolean;
cors_origins: string[];
api_key_expiry_days: number;

-// IP Security
-blocked_ips: string[];
-allowed_ips: string[];
-csp_header: string;
};

// Notification Settings

@@ -95,7 +63,6 @@ interface SystemSettings {
smtp_use_tls: boolean;
from_address: string;
budget_alerts: boolean;
-security_alerts: boolean;
system_alerts: boolean;
};
}

@@ -183,7 +150,10 @@ function SettingsPageContent() {

// Transform each category from backend format {key: {value, type, description}}
// to frontend format {key: value}
+// Skip security category as it has been removed from the UI
for (const [categoryName, categorySettings] of Object.entries(data)) {
+if (categoryName === 'security') continue; // Skip security settings

if (typeof categorySettings === 'object' && categorySettings !== null) {
transformedSettings[categoryName as keyof SystemSettings] = {} as any;
|
|||||||
</Alert>
|
</Alert>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<Tabs defaultValue="security" className="space-y-6">
|
<Tabs defaultValue="api" className="space-y-6">
|
||||||
<TabsList className="grid w-full grid-cols-4">
|
<TabsList className="grid w-full grid-cols-3">
|
||||||
<TabsTrigger value="security">Security</TabsTrigger>
|
|
||||||
<TabsTrigger value="api">API</TabsTrigger>
|
<TabsTrigger value="api">API</TabsTrigger>
|
||||||
<TabsTrigger value="notifications">Notifications</TabsTrigger>
|
<TabsTrigger value="notifications">Notifications</TabsTrigger>
|
||||||
<TabsTrigger value="modules">Modules</TabsTrigger>
|
<TabsTrigger value="modules">Modules</TabsTrigger>
|
||||||
</TabsList>
|
</TabsList>
|
||||||
|
|
||||||
<TabsContent value="security" className="space-y-6">
|
<TabsContent value="api" className="space-y-6">
|
||||||
<Card>
|
|
||||||
<CardHeader>
|
|
||||||
<CardTitle className="flex items-center">
|
|
||||||
<Shield className="mr-2 h-5 w-5" />
|
|
||||||
Security Settings
|
|
||||||
</CardTitle>
|
|
||||||
<CardDescription>
|
|
||||||
Configure password policies, session management, and authentication settings
|
|
||||||
</CardDescription>
|
|
||||||
</CardHeader>
|
|
||||||
<CardContent className="space-y-6">
|
|
||||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
|
||||||
<div className="space-y-4">
|
|
||||||
<h3 className="text-lg font-medium">Password Policy</h3>
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div>
|
|
||||||
<Label htmlFor="password-min-length">Minimum Password Length</Label>
|
|
||||||
<Input
|
|
||||||
id="password-min-length"
|
|
||||||
type="number"
|
|
||||||
min="6"
|
|
||||||
max="50"
|
|
||||||
value={settings.security.password_min_length}
|
|
||||||
onChange={(e) => updateSetting("security", "password_min_length", parseInt(e.target.value))}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div className="space-y-2">
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.security.password_require_uppercase}
|
|
||||||
onCheckedChange={(checked) => updateSetting("security", "password_require_uppercase", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Require uppercase letters</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.security.password_require_lowercase}
|
|
||||||
onCheckedChange={(checked) => updateSetting("security", "password_require_lowercase", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Require lowercase letters</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.security.password_require_numbers}
|
|
||||||
onCheckedChange={(checked) => updateSetting("security", "password_require_numbers", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Require numbers</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.security.password_require_symbols}
|
|
||||||
onCheckedChange={(checked) => updateSetting("security", "password_require_symbols", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Require special characters</Label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="space-y-4">
|
|
||||||
<h3 className="text-lg font-medium">Session & Authentication</h3>
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div>
|
|
||||||
<Label htmlFor="session-timeout">Session Timeout (minutes)</Label>
|
|
||||||
<Input
|
|
||||||
id="session-timeout"
|
|
||||||
type="number"
|
|
||||||
min="5"
|
|
||||||
max="1440"
|
|
||||||
value={settings.security.session_timeout_minutes}
|
|
||||||
onChange={(e) => updateSetting("security", "session_timeout_minutes", parseInt(e.target.value))}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<Label htmlFor="max-login-attempts">Max Login Attempts</Label>
|
|
||||||
<Input
|
|
||||||
id="max-login-attempts"
|
|
||||||
type="number"
|
|
||||||
min="3"
|
|
||||||
max="10"
|
|
||||||
value={settings.security.max_login_attempts}
|
|
||||||
onChange={(e) => updateSetting("security", "max_login_attempts", parseInt(e.target.value))}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<Label htmlFor="lockout-duration">Lockout Duration (minutes)</Label>
|
|
||||||
<Input
|
|
||||||
id="lockout-duration"
|
|
||||||
type="number"
|
|
||||||
min="5"
|
|
||||||
max="60"
|
|
||||||
value={settings.security.lockout_duration_minutes}
|
|
||||||
onChange={(e) => updateSetting("security", "lockout_duration_minutes", parseInt(e.target.value))}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.security.require_2fa}
|
|
||||||
onCheckedChange={(checked) => updateSetting("security", "require_2fa", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Require Two-Factor Authentication</Label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div>
|
|
||||||
<Label htmlFor="allowed-domains">Allowed Email Domains (one per line)</Label>
|
|
||||||
<Textarea
|
|
||||||
id="allowed-domains"
|
|
||||||
value={settings.security.allowed_domains.join('\n')}
|
|
||||||
onChange={(e) => updateSetting("security", "allowed_domains", e.target.value.split('\n').filter(d => d.trim()))}
|
|
||||||
placeholder="example.com company.org"
|
|
||||||
rows={3}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
onClick={() => handleSaveSection("security")}
|
|
||||||
disabled={saving === "security"}
|
|
||||||
>
|
|
||||||
<Save className="mr-2 h-4 w-4" />
|
|
||||||
{saving === "security" ? "Saving..." : "Save Security Settings"}
|
|
||||||
</Button>
|
|
||||||
</CardContent>
|
|
||||||
</Card>
|
|
||||||
</TabsContent>
|
|
||||||
|
|
||||||
<TabsContent value="api" className="space-y-6">
|
|
||||||
<Card>
|
<Card>
|
||||||
<CardHeader>
|
<CardHeader>
|
||||||
<CardTitle className="flex items-center">
|
<CardTitle className="flex items-center">
|
||||||
<Globe className="mr-2 h-5 w-5" />
|
<Globe className="mr-2 h-5 w-5" />
|
||||||
API & Security Settings
|
API Settings
|
||||||
</CardTitle>
|
</CardTitle>
|
||||||
<CardDescription>
|
<CardDescription>
|
||||||
Configure API security, rate limits, threat detection, and request handling
|
Configure API rate limits and request handling
|
||||||
</CardDescription>
|
</CardDescription>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent className="space-y-6">
|
<CardContent className="space-y-6">
|
||||||
{/* Security Features */}
|
|
||||||
<div className="space-y-4">
|
|
||||||
<h3 className="text-lg font-medium flex items-center">
|
|
||||||
<Shield className="mr-2 h-5 w-5" />
|
|
||||||
Security Features
|
|
||||||
</h3>
|
|
||||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.api.security_enabled}
|
|
||||||
onCheckedChange={(checked) => updateSetting("api", "security_enabled", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Enable API Security</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.api.threat_detection_enabled}
|
|
||||||
onCheckedChange={(checked) => updateSetting("api", "threat_detection_enabled", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Threat Detection</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.api.rate_limiting_enabled}
|
|
||||||
onCheckedChange={(checked) => updateSetting("api", "rate_limiting_enabled", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Rate Limiting</Label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="space-y-3">
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.api.ip_reputation_enabled}
|
|
||||||
onCheckedChange={(checked) => updateSetting("api", "ip_reputation_enabled", checked)}
|
|
||||||
/>
|
|
||||||
<Label>IP Reputation Checking</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.api.anomaly_detection_enabled}
|
|
||||||
onCheckedChange={(checked) => updateSetting("api", "anomaly_detection_enabled", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Anomaly Detection</Label>
|
|
||||||
</div>
|
|
||||||
<div className="flex items-center space-x-2">
|
|
||||||
<Switch
|
|
||||||
checked={settings.api.security_headers_enabled}
|
|
||||||
onCheckedChange={(checked) => updateSetting("api", "security_headers_enabled", checked)}
|
|
||||||
/>
|
|
||||||
<Label>Security Headers</Label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
{/* Rate Limiting by Authentication Level */}
|
{/* Rate Limiting by Authentication Level */}
|
||||||
{settings.api.rate_limiting_enabled && (
|
<div className="space-y-4">
|
||||||
<div className="space-y-4">
|
|
||||||
<h3 className="text-lg font-medium">Rate Limiting by Authentication Level</h3>
|
<h3 className="text-lg font-medium">Rate Limiting by Authentication Level</h3>
|
||||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
<div className="space-y-4">
|
<div className="space-y-4">
|
||||||
@@ -677,84 +460,6 @@ function SettingsPageContent() {
</div>
</div>
</div>
-)}

-{/* Security Thresholds */}
-{settings.api.security_enabled && (
-<div className="space-y-4">
-<h3 className="text-lg font-medium">Security Thresholds</h3>
-<div className="grid grid-cols-1 md:grid-cols-3 gap-4">
-<div>
-<Label htmlFor="risk-threshold">Risk Threshold (Block)</Label>
-<Input
-id="risk-threshold"
-type="number"
-min="0"
-max="1"
-step="0.1"
-value={settings.api.security_risk_threshold}
-onChange={(e) => updateSetting("api", "security_risk_threshold", parseFloat(e.target.value))}
-/>
-<p className="text-xs text-muted-foreground mt-1">Requests above this score are blocked</p>
-</div>
-<div>
-<Label htmlFor="warning-threshold">Warning Threshold</Label>
-<Input
-id="warning-threshold"
-type="number"
-min="0"
-max="1"
-step="0.1"
-value={settings.api.security_warning_threshold}
-onChange={(e) => updateSetting("api", "security_warning_threshold", parseFloat(e.target.value))}
-/>
-<p className="text-xs text-muted-foreground mt-1">Requests above this score generate warnings</p>
-</div>
-<div>
-<Label htmlFor="anomaly-threshold">Anomaly Threshold</Label>
-<Input
-id="anomaly-threshold"
-type="number"
-min="0"
-max="1"
-step="0.1"
-value={settings.api.anomaly_threshold}
-onChange={(e) => updateSetting("api", "anomaly_threshold", parseFloat(e.target.value))}
-/>
-<p className="text-xs text-muted-foreground mt-1">Anomalies above this threshold are flagged</p>
-</div>
-</div>
-</div>
-)}
-{/* IP Security */}
-{settings.api.security_enabled && (
-<div className="space-y-4">
-<h3 className="text-lg font-medium">IP Security</h3>
-<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
-<div>
-<Label htmlFor="blocked-ips">Blocked IPs (one per line)</Label>
-<Textarea
-id="blocked-ips"
-value={settings.api.blocked_ips.join('\n')}
-onChange={(e) => updateSetting("api", "blocked_ips", e.target.value.split('\n').filter(ip => ip.trim()))}
-placeholder="192.168.1.100 10.0.0.50"
-rows={3}
-/>
-</div>
-<div>
-<Label htmlFor="allowed-ips">Allowed IPs (empty = allow all)</Label>
-<Textarea
-id="allowed-ips"
-value={settings.api.allowed_ips.join('\n')}
-onChange={(e) => updateSetting("api", "allowed_ips", e.target.value.split('\n').filter(ip => ip.trim()))}
-placeholder="192.168.1.0/24 10.0.0.1"
-rows={3}
-/>
-</div>
-</div>
-</div>
-)}

{/* Request Settings */}
<div className="space-y-4">

@@ -823,28 +528,12 @@ function SettingsPageContent() {
</div>
)}

-{/* Security Headers */}
-{settings.api.security_headers_enabled && (
-<div className="space-y-4">
-<div>
-<Label htmlFor="csp-header">Content Security Policy Header</Label>
-<Textarea
-id="csp-header"
-value={settings.api.csp_header}
-onChange={(e) => updateSetting("api", "csp_header", e.target.value)}
-placeholder="default-src 'self'; script-src 'self' 'unsafe-inline';"
-rows={2}
-/>
-</div>
-</div>
-)}

<Button
onClick={() => handleSaveSection("api")}
disabled={saving === "api"}
>
<Save className="mr-2 h-4 w-4" />
-{saving === "api" ? "Saving..." : "Save API & Security Settings"}
+{saving === "api" ? "Saving..." : "Save API Settings"}
</Button>
</CardContent>
</Card>

@@ -933,13 +622,6 @@ function SettingsPageContent() {
/>
<Label>Budget Alerts</Label>
</div>
-<div className="flex items-center space-x-2">
-<Switch
-checked={settings.notifications.security_alerts}
-onCheckedChange={(checked) => updateSetting("notifications", "security_alerts", checked)}
-/>
-<Label>Security Alerts</Label>
-</div>
<div className="flex items-center space-x-2">
<Switch
checked={settings.notifications.system_alerts}
@@ -1,6 +1,6 @@
"use client"

-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"
import { Button } from "@/components/ui/button"
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
import { useState } from "react"

@@ -27,15 +27,17 @@ export default function TestAuthPage() {
const expiry = tokenManager.getTokenExpiry()
const refreshExpiry = tokenManager.getRefreshTokenExpiry()

-if (!expiry) return "No token"
+if (!expiry.access_token_expiry) return "No token"

const now = new Date()
-const timeUntilExpiry = Math.floor((expiry.getTime() - now.getTime()) / 1000)
+const accessTimeUntilExpiry = Math.floor((expiry.access_token_expiry - now.getTime() / 1000))
+const refreshTimeUntilExpiry = refreshExpiry ? Math.floor((refreshExpiry - now.getTime() / 1000)) : null

return `
-Token expires in: ${Math.floor(timeUntilExpiry / 60)} minutes ${timeUntilExpiry % 60} seconds
-Access token expiry: ${expiry.toLocaleString()}
-Refresh token expiry: ${refreshExpiry?.toLocaleString() || 'N/A'}
+Access token expires in: ${Math.floor(accessTimeUntilExpiry / 60)} minutes ${accessTimeUntilExpiry % 60} seconds
+Refresh token expires in: ${refreshTimeUntilExpiry ? `${Math.floor(refreshTimeUntilExpiry / 60)} minutes ${refreshTimeUntilExpiry % 60} seconds` : 'N/A'}
+Access token expiry: ${new Date(expiry.access_token_expiry * 1000).toLocaleString()}
+Refresh token expiry: ${refreshExpiry ? new Date(refreshExpiry * 1000).toLocaleString() : 'N/A'}
Authenticated: ${tokenManager.isAuthenticated()}
`
}
@@ -2,7 +2,7 @@

import { useEffect, useState } from "react"
import { useRouter } from "next/navigation"
-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"

interface ProtectedRouteProps {
children: React.ReactNode

@@ -11,7 +11,7 @@ import { Separator } from "@/components/ui/separator"
import { MessageCircle, Send, Bot, User, Loader2, Copy, ThumbsUp, ThumbsDown } from "lucide-react"
import { useToast } from "@/hooks/use-toast"
import { generateTimestampId } from "@/lib/id-utils"
-import { chatbotApi, type AppError } from "@/lib/api-client"
+import { chatbotApi } from "@/lib/api-client"
import ReactMarkdown from "react-markdown"
import remarkGfm from "remark-gfm"
import rehypeHighlight from "rehype-highlight"

@@ -118,6 +118,16 @@ export function ChatInterface({ chatbotId, chatbotName, onClose }: ChatInterface
setInput("")
setIsLoading(true)

+// Enhanced logging for debugging
+const debugInfo = {
+chatbotId,
+messageLength: messageToSend.length,
+conversationId,
+timestamp: new Date().toISOString(),
+messagesCount: messages.length
+}
+console.log('=== CHAT REQUEST DEBUG ===', debugInfo)

try {
let data: any

@@ -135,9 +145,9 @@ export function ChatInterface({ chatbotId, chatbotName, onClose }: ChatInterface
)

const assistantMessage: ChatMessage = {
-id: data.message_id || generateTimestampId('msg'),
+id: data.id || generateTimestampId('msg'),
role: 'assistant',
-content: data.response,
+content: data.choices?.[0]?.message?.content || data.response || 'No response',
timestamp: new Date(),
sources: data.sources
}
@@ -28,7 +28,7 @@ import {
AlertCircle
} from 'lucide-react';
import { usePlugin, type PluginInfo, type AvailablePlugin } from '../../contexts/PluginContext';
-import { useAuth } from '../../contexts/AuthContext';
+import { useAuth } from '@/components/providers/auth-provider';
import { PluginConfigurationDialog } from './PluginConfigurationDialog';

interface PluginCardProps {

@@ -8,7 +8,8 @@ import { Alert, AlertDescription } from '@/components/ui/alert';
import { Card, CardContent } from '@/components/ui/card';
import { Skeleton } from '@/components/ui/skeleton';
import { AlertCircle, Loader2 } from 'lucide-react';
-import { useAuth } from '../../contexts/AuthContext';
+import { useAuth } from '@/components/providers/auth-provider';
+import { tokenManager } from '@/lib/token-manager';
import { usePlugin, type PluginInfo } from '../../contexts/PluginContext';
import { config } from '../../lib/config';

@@ -48,8 +49,8 @@ const PluginIframe: React.FC<PluginIframeProps> = ({
// Validate origin - should be from our backend
const allowedOrigins = [
window.location.origin,
-config.getBackendUrl(),
+config.API_BASE_URL,
-config.getApiUrl()
+config.API_BASE_URL
].filter(Boolean);

if (!allowedOrigins.some(origin => event.origin.startsWith(origin))) {

@@ -161,7 +162,8 @@ export const PluginPageRenderer: React.FC<PluginPageRendererProps> = ({
pagePath,
componentName
}) => {
-const { user, token } = useAuth();
+const { user } = useAuth();
+const token = tokenManager.getAccessToken();
const {
installedPlugins,
getPluginPages,
@@ -3,11 +3,13 @@
import * as React from "react"
import { createContext, useContext, useEffect, useState } from "react"
import { apiClient } from "@/lib/api-client"
+import { tokenManager } from "@/lib/token-manager"

interface User {
id: string
username: string
email: string
+name?: string
role: string
permissions: string[]
created_at: string

@@ -17,7 +19,8 @@ interface User {
interface AuthContextType {
user: User | null
isLoading: boolean
-login: (username: string, password: string) => Promise<void>
+isAuthenticated: boolean
+login: (email: string, password: string) => Promise<void>
logout: () => void
register: (username: string, email: string, password: string) => Promise<void>
refreshToken: () => Promise<void>

@@ -39,7 +42,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {

useEffect(() => {
// Check for existing token on mount
-const token = localStorage.getItem("access_token")
+const token = tokenManager.getAccessToken()
if (token) {
// Validate token and get user info
validateToken(token)

@@ -50,33 +53,21 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {

const validateToken = async (token: string) => {
try {
-// Temporarily set token in localStorage for apiClient to use
-const previousToken = localStorage.getItem('token')
-localStorage.setItem('token', token)

const userData = await apiClient.get("/api-internal/v1/auth/me")
setUser(userData)

-// Restore previous token if different
-if (previousToken && previousToken !== token) {
-localStorage.setItem('token', previousToken)
-}
} catch (error) {
-localStorage.removeItem("access_token")
-localStorage.removeItem("refresh_token")
+tokenManager.clearTokens()
} finally {
setIsLoading(false)
}
}

-const login = async (username: string, password: string) => {
+const login = async (email: string, password: string) => {
try {
-const data = await apiClient.post("/api-internal/v1/auth/login", { username, password })
+const data = await apiClient.post("/api-internal/v1/auth/login", { email, password })

-// Store tokens
+// Store tokens using tokenManager
-localStorage.setItem("access_token", data.access_token)
+tokenManager.setTokens(data.access_token, data.refresh_token)
-localStorage.setItem("refresh_token", data.refresh_token)
-localStorage.setItem("token", data.access_token) // Also set token for apiClient

// Get user info
await validateToken(data.access_token)

@@ -89,10 +80,8 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
try {
const data = await apiClient.post("/api-internal/v1/auth/register", { username, email, password })

-// Store tokens
+// Store tokens using tokenManager
-localStorage.setItem("access_token", data.access_token)
+tokenManager.setTokens(data.access_token, data.refresh_token)
-localStorage.setItem("refresh_token", data.refresh_token)
-localStorage.setItem("token", data.access_token) // Also set token for apiClient

// Get user info
await validateToken(data.access_token)

@@ -102,22 +91,19 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
}

const logout = () => {
-localStorage.removeItem("access_token")
-localStorage.removeItem("refresh_token")
-localStorage.removeItem("token") // Also clear token for apiClient
+tokenManager.clearTokens()
setUser(null)
}

const refreshToken = async () => {
try {
-const refresh_token = localStorage.getItem("refresh_token")
+const refresh_token = tokenManager.getRefreshToken()
if (!refresh_token) {
throw new Error("No refresh token available")
}

const data = await apiClient.post("/api-internal/v1/auth/refresh", { refresh_token })
-localStorage.setItem("access_token", data.access_token)
-localStorage.setItem("token", data.access_token) // Also set token for apiClient
+tokenManager.setTokens(data.access_token, refresh_token)

return data.access_token
} catch (error) {

@@ -130,6 +116,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
const value: AuthContextType = {
user,
isLoading,
+isAuthenticated: !!user,
login,
logout,
register,
@@ -91,8 +91,9 @@ export function DocumentUpload({ collections, selectedCollection, onDocumentUplo
updateProgress(60)

await uploadFile(
-'/api-internal/v1/rag/documents',
uploadingFile.file,
+'/api-internal/v1/rag/documents',
+(progress) => updateProgress(progress),
{ collection_id: targetCollection }
)

@@ -8,7 +8,7 @@ import { Button } from "@/components/ui/button"
import { Badge } from "@/components/ui/badge"
import { ThemeToggle } from "@/components/ui/theme-toggle"
import { UserMenu } from "@/components/ui/user-menu"
-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"
import { useModules } from "@/contexts/ModulesContext"
import { usePlugin } from "@/contexts/PluginContext"
import {

@@ -6,7 +6,7 @@ import { Badge } from "@/components/ui/badge"
import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle, DialogTrigger } from "@/components/ui/dialog"
import { Label } from "@/components/ui/label"
import { Input } from "@/components/ui/input"
-import { useAuth } from "@/contexts/AuthContext"
+import { useAuth } from "@/components/providers/auth-provider"
import { useToast } from "@/hooks/use-toast"
import {
DropdownMenu,

@@ -18,6 +18,16 @@ import {
import { User, Settings, Lock, LogOut, ChevronDown } from "lucide-react"
import { useState } from "react"

+// Helper function to get API URL with proper protocol
+const getApiUrl = () => {
+if (typeof window !== 'undefined') {
+const protocol = window.location.protocol.slice(0, -1) // Remove ':' from 'https:'
+const host = window.location.hostname
+return `${protocol}://${host}`
+}
+return `http://${process.env.NEXT_PUBLIC_BASE_URL || 'localhost'}`
+}

export function UserMenu() {
const { user, logout } = useAuth()
const { toast } = useToast()

@@ -62,7 +72,7 @@ export function UserMenu() {
throw new Error('Authentication required')
}

-const response = await fetch('/api-internal/v1/auth/change-password', {
+const response = await fetch(`${getApiUrl()}/api-internal/v1/auth/change-password`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@@ -1,179 +0,0 @@
-"use client"
-
-import { createContext, useContext, useState, useEffect, ReactNode } from "react"
-import { useRouter } from "next/navigation"
-import { tokenManager } from "@/lib/token-manager"
-
-interface User {
-id: string
-email: string
-name: string
-role: string
-}
-
-interface AuthContextType {
-user: User | null
-isAuthenticated: boolean
-login: (email: string, password: string) => Promise<void>
-logout: () => void
-isLoading: boolean
-}
-
-const AuthContext = createContext<AuthContextType | undefined>(undefined)
-
-export function AuthProvider({ children }: { children: ReactNode }) {
-const [user, setUser] = useState<User | null>(null)
-const [isLoading, setIsLoading] = useState(true)
-const router = useRouter()
-
-// Initialize auth state and listen to token manager events
-useEffect(() => {
-const initAuth = async () => {
-// Check if we have valid tokens
-if (tokenManager.isAuthenticated()) {
-// Try to get user info
-await fetchUserInfo()
-}
-setIsLoading(false)
-}
-
-// Set up event listeners
-const handleTokensUpdated = () => {
-// Tokens were updated (refreshed), update user if needed
-if (!user) {
-fetchUserInfo()
-}
-}
-
-const handleTokensCleared = () => {
-// Tokens were cleared, clear user
-setUser(null)
-}
-
-const handleSessionExpired = (reason: string) => {
-console.log('Session expired:', reason)
-setUser(null)
-// TokenManager and API client will handle redirect
-}
-
-const handleLogout = () => {
-setUser(null)
-router.push('/login')
-}
-
-// Register event listeners
-tokenManager.on('tokensUpdated', handleTokensUpdated)
-tokenManager.on('tokensCleared', handleTokensCleared)
-tokenManager.on('sessionExpired', handleSessionExpired)
-tokenManager.on('logout', handleLogout)
-
-// Initialize
-initAuth()
-
-// Cleanup
-return () => {
-tokenManager.off('tokensUpdated', handleTokensUpdated)
-tokenManager.off('tokensCleared', handleTokensCleared)
-tokenManager.off('sessionExpired', handleSessionExpired)
-tokenManager.off('logout', handleLogout)
-}
-}, [])
-
-const fetchUserInfo = async () => {
-try {
-const token = await tokenManager.getAccessToken()
-if (!token) return
-
-const response = await fetch('/api-internal/v1/auth/me', {
-headers: {
-'Authorization': `Bearer ${token}`,
-},
-})
-
-if (response.ok) {
-const userData = await response.json()
-const user = {
-id: userData.id || userData.sub,
-email: userData.email,
-name: userData.name || userData.email,
-role: userData.role || 'user',
-}
-setUser(user)
-
-// Store user info for offline access
-if (typeof window !== 'undefined') {
-localStorage.setItem('user', JSON.stringify(user))
-}
-}
-} catch (error) {
-console.error('Failed to fetch user info:', error)
-}
-}
-
-const login = async (email: string, password: string) => {
-setIsLoading(true)
-
-try {
-const response = await fetch('/api-internal/v1/auth/login', {
-method: 'POST',
-headers: {
-'Content-Type': 'application/json',
-},
-body: JSON.stringify({ email, password }),
-})
-
-if (!response.ok) {
-const error = await response.json()
-throw new Error(error.detail || 'Invalid credentials')
-}
-
-const data = await response.json()
-
-// Store tokens in TokenManager
-tokenManager.setTokens(
-data.access_token,
-data.refresh_token,
-data.expires_in
-)
-
-// Fetch user info
-await fetchUserInfo()
-
-// Navigate to dashboard
-router.push('/dashboard')
-
-} catch (error) {
-console.error('Login error:', error)
-throw error
-} finally {
-setIsLoading(false)
-}
-}
-
-const logout = () => {
-tokenManager.logout()
-// Token manager will emit 'logout' event which we handle above
-}
-
-return (
-<AuthContext.Provider
-value={{
-user,
-isAuthenticated: tokenManager.isAuthenticated(),
-login,
-logout,
-isLoading
-}}
->
-{children}
-</AuthContext.Provider>
-)
-}
-
-export function useAuth() {
-const context = useContext(AuthContext)
-if (!context) {
-throw new Error("useAuth must be used within an AuthProvider")
-}
-return context
-}
@@ -69,8 +69,15 @@ export function ModulesProvider({ children }: { children: ReactNode }) {
      setLastUpdated(new Date())

    } catch (err) {
-     // Only set error if we're authenticated (to avoid noise on auth pages)
-     if (tokenManager.isAuthenticated()) {
+     // If we get a 401 error, clear the tokens
+     if (err && typeof err === 'object' && 'response' in err && (err.response as any)?.status === 401) {
+       tokenManager.clearTokens()
+       setModules([])
+       setEnabledModules(new Set())
+       setError(null)
+       setLastUpdated(null)
+     } else if (tokenManager.isAuthenticated()) {
+       // Only set error if we're authenticated (to avoid noise on auth pages)
        setError(err instanceof Error ? err.message : "Failed to load modules")
      }
    } finally {
@@ -4,7 +4,7 @@
 * Plugin Context - Manages plugin state and UI integration
 */
import React, { createContext, useContext, useState, useEffect, useCallback, ReactNode } from 'react';
-import { useAuth } from './AuthContext';
+import { useAuth } from '@/components/providers/auth-provider';
import { apiClient } from '@/lib/api-client';

export interface PluginInfo {
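Note (not part of the diff): a minimal sketch of a consumer after this import swap, assuming the replacement provider at '@/components/providers/auth-provider' keeps the same useAuth() shape as the deleted context above; the component name is hypothetical.

// Sketch only: assumes useAuth() still returns { user, isAuthenticated, isLoading, login, logout }.
import { useAuth } from '@/components/providers/auth-provider'

export function CurrentUserBadge() {
  const { user, isAuthenticated, isLoading } = useAuth()
  if (isLoading) return <span>Loading…</span>
  return <span>{isAuthenticated && user ? user.email : 'Signed out'}</span>
}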
150 frontend/src/contexts/ToastContext.tsx (Normal file)
@@ -0,0 +1,150 @@
"use client"

import React, { createContext, useContext, useState, useCallback, useRef } from 'react'
import { generateShortId } from '@/lib/id-utils'

export interface ToastProps {
  id: string
  title?: string
  description?: string
  variant?: 'default' | 'destructive' | 'success' | 'warning'
  action?: React.ReactElement
  duration?: number
}

export interface ToastOptions extends Omit<ToastProps, 'id'> {
  duration?: number
}

interface ToastContextType {
  toasts: ToastProps[]
  toast: (options: ToastOptions) => () => void
  success: (title: string, description?: string, options?: Partial<ToastOptions>) => () => void
  error: (title: string, description?: string, options?: Partial<ToastOptions>) => () => void
  warning: (title: string, description?: string, options?: Partial<ToastOptions>) => () => void
  info: (title: string, description?: string, options?: Partial<ToastOptions>) => () => void
  dismiss: (id: string) => void
  clearAll: () => void
}

const ToastContext = createContext<ToastContextType | undefined>(undefined)

export function ToastProvider({ children }: { children: React.ReactNode }) {
  const [toasts, setToasts] = useState<ToastProps[]>([])
  const timeoutRefs = useRef<Map<string, NodeJS.Timeout>>(new Map())

  const dismissToast = useCallback((id: string) => {
    setToasts(prev => prev.filter(toast => toast.id !== id))

    // Clear timeout if exists
    const timeoutId = timeoutRefs.current.get(id)
    if (timeoutId) {
      clearTimeout(timeoutId)
      timeoutRefs.current.delete(id)
    }
  }, [])

  const toast = useCallback((options: ToastOptions) => {
    const {
      duration = 5000,
      variant = 'default',
      ...props
    } = options

    // Generate unique ID using improved utility
    const id = generateShortId('toast')
    const toastWithId: ToastProps = {
      ...props,
      id,
      variant,
      duration
    }

    // Add to toasts array
    setToasts(prev => [...prev, toastWithId])

    // Auto-remove after specified duration
    if (duration > 0) {
      const timeoutId = setTimeout(() => {
        dismissToast(id)
      }, duration)

      timeoutRefs.current.set(id, timeoutId)
    }

    // Return dismiss function for manual control
    return () => dismissToast(id)
  }, [dismissToast])

  // Convenience methods for common toast types
  const success = useCallback((title: string, description?: string, options?: Partial<ToastOptions>) => {
    return toast({ title, description, variant: 'success', ...options })
  }, [toast])

  const error = useCallback((title: string, description?: string, options?: Partial<ToastOptions>) => {
    return toast({ title, description, variant: 'destructive', duration: 7000, ...options }) // Errors should stay longer
  }, [toast])

  const warning = useCallback((title: string, description?: string, options?: Partial<ToastOptions>) => {
    return toast({ title, description, variant: 'warning', ...options })
  }, [toast])

  const info = useCallback((title: string, description?: string, options?: Partial<ToastOptions>) => {
    return toast({ title, description, variant: 'default', ...options })
  }, [toast])

  // Clear all toasts
  const clearAll = useCallback(() => {
    // Clear all timeouts
    timeoutRefs.current.forEach(timeoutId => clearTimeout(timeoutId))
    timeoutRefs.current.clear()

    setToasts([])
  }, [])

  const value: ToastContextType = {
    toasts,
    toast,
    success,
    error,
    warning,
    info,
    dismiss: dismissToast,
    clearAll,
  }

  return (
    <ToastContext.Provider value={value}>
      {children}
    </ToastContext.Provider>
  )
}

export function useToast() {
  const context = useContext(ToastContext)
  if (context === undefined) {
    throw new Error('useToast must be used within a ToastProvider')
  }
  return context
}
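Note (not part of the diff): a minimal usage sketch for the ToastProvider/useToast API defined above; only ToastProvider and useToast come from the new file, the saveSettings action and component names are placeholders.

import { ToastProvider, useToast } from '@/contexts/ToastContext'

const saveSettings = async () => {} // placeholder async action

function SaveButton() {
  const { success, error } = useToast()
  const onSave = async () => {
    try {
      await saveSettings()
      success('Saved', 'Settings were stored')          // dismisses after 5s by default
    } catch {
      error('Save failed', 'Please try again')          // destructive variant, 7s duration
    }
  }
  return <button onClick={onSave}>Save</button>
}

// Wrap near the app root: <ToastProvider><SaveButton /></ToastProvider>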
@@ -4,7 +4,7 @@

import { useState, useCallback, useMemo } from 'react'
import { generateId } from '@/lib/id-utils'
-import { chatbotApi, type AppError } from '@/lib/api-client'
+import { chatbotApi } from '@/lib/api-client'
import { useToast } from './use-toast'

export interface ChatbotConfig {
@@ -59,10 +59,10 @@ export function useChatbotForm() {
  const loadChatbots = useCallback(async () => {
    setIsLoading(true)
    try {
-     const data = await chatbotApi.listChatbots()
+     const data = await chatbotApi.list()
      setChatbots(data)
    } catch (error) {
-     const appError = error as AppError
+     console.error('Load chatbots error:', error)
      toast.error("Loading Failed", "Failed to load chatbots")
    } finally {
      setIsLoading(false)
@@ -73,15 +73,20 @@ export function useChatbotForm() {
  const createChatbot = useCallback(async (config: ChatbotConfig) => {
    setIsSubmitting(true)
    try {
-     const newChatbot = await chatbotApi.createChatbot(config)
+     const newChatbot = await chatbotApi.create(config)
      setChatbots(prev => [...prev, newChatbot])
      toast.success("Success", `Chatbot "${config.name}" created successfully`)
      return newChatbot
    } catch (error) {
-     const appError = error as AppError
+     console.error('Create chatbot error:', error)

-     if (appError.code === 'VALIDATION_ERROR') {
-       toast.error("Validation Error", appError.details || "Please check your input")
+     if (error && typeof error === 'object' && 'response' in error) {
+       const detail = error.response?.data?.detail || error.response?.data?.error
+       if (detail) {
+         toast.error("Validation Error", detail)
+       } else {
+         toast.error("Creation Failed", "Failed to create chatbot")
+       }
      } else {
        toast.error("Creation Failed", "Failed to create chatbot")
      }
@@ -95,12 +100,12 @@ export function useChatbotForm() {
  const updateChatbot = useCallback(async (id: string, config: ChatbotConfig) => {
    setIsSubmitting(true)
    try {
-     const updatedChatbot = await chatbotApi.updateChatbot(id, config)
+     const updatedChatbot = await chatbotApi.update(id, config)
      setChatbots(prev => prev.map(bot => bot.id === id ? updatedChatbot : bot))
      toast.success("Success", `Chatbot "${config.name}" updated successfully`)
      return updatedChatbot
    } catch (error) {
-     const appError = error as AppError
+     console.error('Update chatbot error:', error)
      toast.error("Update Failed", "Failed to update chatbot")
      throw error
    } finally {
@@ -112,11 +117,11 @@ export function useChatbotForm() {
  const deleteChatbot = useCallback(async (id: string) => {
    setIsSubmitting(true)
    try {
-     await chatbotApi.deleteChatbot(id)
+     await chatbotApi.delete(id)
      setChatbots(prev => prev.filter(bot => bot.id !== id))
      toast.success("Success", "Chatbot deleted successfully")
    } catch (error) {
-     const appError = error as AppError
+     console.error('Delete chatbot error:', error)
      toast.error("Deletion Failed", "Failed to delete chatbot")
      throw error
    } finally {
@@ -1,3 +1,4 @@
export interface AppError extends Error {
  code: 'UNAUTHORIZED' | 'NETWORK_ERROR' | 'VALIDATION_ERROR' | 'NOT_FOUND' | 'FORBIDDEN' | 'TIMEOUT' | 'UNKNOWN'
  status?: number
@@ -12,4 +12,3 @@ export const config = {
    return process.env.NEXT_PUBLIC_APP_NAME || 'Enclava'
  },
}
174 frontend/src/lib/error-utils.ts (Normal file)
@@ -0,0 +1,174 @@
/**
 * Utility functions for error handling and user feedback
 */

export interface AppError {
  code: string
  message: string
  details?: string
  retryable?: boolean
}

export const ERROR_CODES = {
  NETWORK_ERROR: 'NETWORK_ERROR',
  UNAUTHORIZED: 'UNAUTHORIZED',
  VALIDATION_ERROR: 'VALIDATION_ERROR',
  TIMEOUT_ERROR: 'TIMEOUT_ERROR',
  SERVER_ERROR: 'SERVER_ERROR',
  UNKNOWN_ERROR: 'UNKNOWN_ERROR',
} as const

/**
 * Converts various error types into standardized AppError format
 */
export function normalizeError(error: unknown): AppError {
  if (error instanceof Error) {
    // Network or fetch errors
    if (error.name === 'TypeError' && error.message.includes('fetch')) {
      return {
        code: ERROR_CODES.NETWORK_ERROR,
        message: 'Unable to connect to server. Please check your internet connection.',
        retryable: true
      }
    }

    // Timeout errors
    if (error.name === 'AbortError' || error.message.includes('timeout')) {
      return {
        code: ERROR_CODES.TIMEOUT_ERROR,
        message: 'Request timed out. Please try again.',
        retryable: true
      }
    }

    return {
      code: ERROR_CODES.UNKNOWN_ERROR,
      message: error.message || 'An unexpected error occurred',
      details: error.stack,
      retryable: false
    }
  }

  if (typeof error === 'string') {
    return {
      code: ERROR_CODES.UNKNOWN_ERROR,
      message: error,
      retryable: false
    }
  }

  return {
    code: ERROR_CODES.UNKNOWN_ERROR,
    message: 'An unknown error occurred',
    retryable: false
  }
}

/**
 * Handles HTTP response errors
 */
export async function handleHttpError(response: Response): Promise<AppError> {
  let errorDetails: string

  try {
    const errorData = await response.json()
    errorDetails = errorData.error || errorData.message || 'Unknown error'
  } catch {
    try {
      // Use the cloned response for text reading since original body was consumed
      const responseClone = response.clone()
      errorDetails = await responseClone.text()
    } catch {
      errorDetails = `HTTP ${response.status} error`
    }
  }

  switch (response.status) {
    case 401:
      return {
        code: ERROR_CODES.UNAUTHORIZED,
        message: 'You need to log in to continue',
        details: errorDetails,
        retryable: false
      }

    case 400:
      return {
        code: ERROR_CODES.VALIDATION_ERROR,
        message: 'Invalid request. Please check your input.',
        details: errorDetails,
        retryable: false
      }

    case 429:
      return {
        code: ERROR_CODES.SERVER_ERROR,
        message: 'Too many requests. Please wait a moment and try again.',
        details: errorDetails,
        retryable: true
      }

    case 500:
    case 502:
    case 503:
    case 504:
      return {
        code: ERROR_CODES.SERVER_ERROR,
        message: 'Server error. Please try again in a moment.',
        details: errorDetails,
        retryable: true
      }

    default:
      return {
        code: ERROR_CODES.SERVER_ERROR,
        message: `Request failed (${response.status}): ${errorDetails}`,
        details: errorDetails,
        retryable: response.status >= 500
      }
  }
}

/**
 * Retry wrapper with exponential backoff
 */
export async function withRetry<T>(
  fn: () => Promise<T>,
  options: {
    maxAttempts?: number
    initialDelay?: number
    maxDelay?: number
    backoffMultiplier?: number
  } = {}
): Promise<T> {
  const {
    maxAttempts = 3,
    initialDelay = 1000,
    maxDelay = 10000,
    backoffMultiplier = 2
  } = options

  let lastError: unknown
  let delay = initialDelay

  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await fn()
    } catch (error) {
      lastError = error

      const appError = normalizeError(error)

      // Don't retry non-retryable errors
      if (!appError.retryable || attempt === maxAttempts) {
        throw error
      }

      // Wait before retrying
      await new Promise(resolve => setTimeout(resolve, delay))
      delay = Math.min(delay * backoffMultiplier, maxDelay)
    }
  }

  throw lastError
}
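Note (not part of the diff): a short sketch combining the helpers above. withRetry retries network-level failures (fetch TypeErrors, timeouts) with exponential backoff, while handleHttpError maps an HTTP error response to an AppError. The endpoint path is a placeholder, not taken from the codebase.

import { withRetry, handleHttpError } from '@/lib/error-utils'

async function loadModules() {
  // Retries only retryable failures, up to 3 attempts with backoff.
  const res = await withRetry(() => fetch('/api-internal/v1/modules'), { maxAttempts: 3 }) // placeholder endpoint
  if (!res.ok) {
    const appError = await handleHttpError(res)
    throw new Error(appError.message) // surface a user-friendly message
  }
  return res.json()
}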
@@ -13,4 +13,3 @@ export function generateTimestampId(prefix = "id"): string {
  const rand = Math.floor(Math.random() * 1000).toString().padStart(3, '0')
  return `${prefix}_${ts}_${rand}`
}
301 frontend/src/lib/performance.ts (Normal file)
@@ -0,0 +1,301 @@
/**
 * Performance monitoring and optimization utilities
 */

import React from 'react'

export interface PerformanceMetric {
  name: string
  value: number
  timestamp: number
  metadata?: Record<string, any>
}

export interface PerformanceReport {
  metrics: PerformanceMetric[]
  summary: {
    averageResponseTime: number
    totalRequests: number
    errorRate: number
    slowestRequests: PerformanceMetric[]
  }
}

class PerformanceMonitor {
  private metrics: PerformanceMetric[] = []
  private maxMetrics = 1000 // Keep last 1000 metrics
  private enabled = process.env.NODE_ENV === 'development'

  /**
   * Start timing an operation
   */
  startTiming(name: string, metadata?: Record<string, any>): () => void {
    if (!this.enabled) {
      return () => {} // No-op in production
    }

    const startTime = performance.now()

    return () => {
      const duration = performance.now() - startTime
      this.recordMetric(name, duration, metadata)
    }
  }

  /**
   * Record a performance metric
   */
  recordMetric(name: string, value: number, metadata?: Record<string, any>): void {
    if (!this.enabled) return

    const metric: PerformanceMetric = {
      name,
      value,
      timestamp: Date.now(),
      metadata
    }

    this.metrics.push(metric)

    // Keep only the most recent metrics
    if (this.metrics.length > this.maxMetrics) {
      this.metrics = this.metrics.slice(-this.maxMetrics)
    }

    // Log slow operations
    if (value > 1000) { // Slower than 1 second
    }
  }

  /**
   * Measure and track API calls
   */
  async trackApiCall<T>(
    name: string,
    apiCall: () => Promise<T>,
    metadata?: Record<string, any>
  ): Promise<T> {
    const endTiming = this.startTiming(`api_${name}`, metadata)

    try {
      const result = await apiCall()
      endTiming()
      return result
    } catch (error) {
      endTiming()
      this.recordMetric(`api_${name}_error`, 1, {
        ...metadata,
        error: error instanceof Error ? error.message : 'Unknown error'
      })
      throw error
    }
  }

  /**
   * Track React component render times
   */
  trackComponentRender(componentName: string, renderCount: number = 1): void {
    this.recordMetric(`render_${componentName}`, renderCount)
  }

  /**
   * Get performance report
   */
  getReport(): PerformanceReport {
    const apiMetrics = this.metrics.filter(m => m.name.startsWith('api_'))
    const errorMetrics = this.metrics.filter(m => m.name.includes('_error'))

    const totalRequests = apiMetrics.length
    const errorRate = totalRequests > 0 ? (errorMetrics.length / totalRequests) * 100 : 0

    const responseTimes = apiMetrics.map(m => m.value)
    const averageResponseTime = responseTimes.length > 0
      ? responseTimes.reduce((sum, time) => sum + time, 0) / responseTimes.length
      : 0

    const slowestRequests = [...apiMetrics]
      .sort((a, b) => b.value - a.value)
      .slice(0, 10)

    return {
      metrics: this.metrics,
      summary: {
        averageResponseTime,
        totalRequests,
        errorRate,
        slowestRequests
      }
    }
  }

  /**
   * Clear all metrics
   */
  clear(): void {
    this.metrics = []
  }

  /**
   * Enable/disable monitoring
   */
  setEnabled(enabled: boolean): void {
    this.enabled = enabled
  }

  /**
   * Export metrics for analysis
   */
  exportMetrics(): string {
    return JSON.stringify({
      timestamp: Date.now(),
      userAgent: navigator.userAgent,
      metrics: this.metrics,
      summary: this.getReport().summary
    }, null, 2)
  }
}

// Global performance monitor instance
export const performanceMonitor = new PerformanceMonitor()

/**
 * React hook for component performance tracking
 */
export function usePerformanceTracking(componentName: string) {
  const [renderCount, setRenderCount] = React.useState(0)

  React.useEffect(() => {
    const newCount = renderCount + 1
    setRenderCount(newCount)
    performanceMonitor.trackComponentRender(componentName, newCount)
  })

  return {
    renderCount,
    trackOperation: (name: string, metadata?: Record<string, any>) =>
      performanceMonitor.startTiming(`${componentName}_${name}`, metadata),

    trackApiCall: <T>(name: string, apiCall: () => Promise<T>) =>
      performanceMonitor.trackApiCall(`${componentName}_${name}`, apiCall)
  }
}

/**
 * Debounce utility for performance optimization
 */
export function debounce<Args extends any[]>(
  func: (...args: Args) => void,
  delay: number
): (...args: Args) => void {
  let timeoutId: NodeJS.Timeout | null = null

  return (...args: Args) => {
    if (timeoutId) {
      clearTimeout(timeoutId)
    }

    timeoutId = setTimeout(() => {
      func.apply(null, args)
    }, delay)
  }
}

/**
 * Throttle utility for performance optimization
 */
export function throttle<Args extends any[]>(
  func: (...args: Args) => void,
  limit: number
): (...args: Args) => void {
  let inThrottle = false

  return (...args: Args) => {
    if (!inThrottle) {
      func.apply(null, args)
      inThrottle = true
      setTimeout(() => inThrottle = false, limit)
    }
  }
}

/**
 * Memoization utility with performance tracking
 */
export function memoizeWithTracking<Args extends any[], Return>(
  fn: (...args: Args) => Return,
  keyGenerator?: (...args: Args) => string
): (...args: Args) => Return {
  const cache = new Map<string, { result: Return; timestamp: number }>()
  const cacheTimeout = 5 * 60 * 1000 // 5 minutes

  return (...args: Args) => {
    const key = keyGenerator ? keyGenerator(...args) : JSON.stringify(args)
    const now = Date.now()

    // Check cache
    const cached = cache.get(key)
    if (cached && (now - cached.timestamp) < cacheTimeout) {
      performanceMonitor.recordMetric('memoize_hit', 1, { function: fn.name })
      return cached.result
    }

    // Compute result
    const endTiming = performanceMonitor.startTiming('memoize_compute', { function: fn.name })
    const result = fn(...args)
    endTiming()

    // Store in cache
    cache.set(key, { result, timestamp: now })
    performanceMonitor.recordMetric('memoize_miss', 1, { function: fn.name })

    // Clean up old entries
    if (cache.size > 100) {
      const entries = Array.from(cache.entries())
      entries
        .filter(([, value]) => (now - value.timestamp) > cacheTimeout)
        .forEach(([key]) => cache.delete(key))
    }

    return result
  }
}

/**
 * Web Vitals tracking
 */
export function trackWebVitals() {
  if (typeof window === 'undefined') return

  // Track Largest Contentful Paint
  if ('PerformanceObserver' in window) {
    try {
      new PerformanceObserver((list) => {
        list.getEntries().forEach((entry) => {
          if (entry.entryType === 'largest-contentful-paint') {
            performanceMonitor.recordMetric('lcp', entry.startTime)
          }
          if (entry.entryType === 'first-input') {
            performanceMonitor.recordMetric('fid', (entry as any).processingStart - entry.startTime)
          }
        })
      }).observe({ entryTypes: ['largest-contentful-paint', 'first-input'] })
    } catch (error) {
    }
  }

  // Track Cumulative Layout Shift
  if ('PerformanceObserver' in window) {
    try {
      let clsValue = 0
      new PerformanceObserver((list) => {
        list.getEntries().forEach((entry) => {
          if (!(entry as any).hadRecentInput) {
            clsValue += (entry as any).value
            performanceMonitor.recordMetric('cls', clsValue)
          }
        })
      }).observe({ entryTypes: ['layout-shift'] })
    } catch (error) {
    }
  }
}
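Note (not part of the diff): a brief sketch of the monitor above wrapping an API call and reading the aggregate report. Per the implementation, metrics are only collected when NODE_ENV is 'development'; the endpoint path is a placeholder.

import { performanceMonitor } from '@/lib/performance'

// Timed as "api_list_chatbots"; failures additionally record "api_list_chatbots_error".
async function timedListChatbots() {
  return performanceMonitor.trackApiCall('list_chatbots', () =>
    fetch('/api-internal/v1/chatbots').then(r => r.json()) // placeholder endpoint
  )
}

// Later, e.g. in a debug panel:
const { summary } = performanceMonitor.getReport()
console.log(`avg ${summary.averageResponseTime.toFixed(1)}ms over ${summary.totalRequests} calls`)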
44 frontend/src/lib/playground-config.ts (Normal file)
@@ -0,0 +1,44 @@
// Centralized playground configuration
export const playgroundConfig = {
  // Working models (avoiding rate-limited ones)
  availableModels: [
    {
      id: 'openrouter-gpt-4',
      name: 'GPT-4 (OpenRouter)',
      provider: 'OpenRouter',
      category: 'chat',
      status: 'available'
    },
    {
      id: 'openrouter-claude-3-sonnet',
      name: 'Claude 3 Sonnet (OpenRouter)',
      provider: 'OpenRouter',
      category: 'chat',
      status: 'available'
    }
  ],

  // Rate limited models to avoid
  rateLimitedModels: [
    'ollama-qwen3-235b',
    'ollama-gemini-2.0-flash',
    'ollama-gemini-2.5-pro'
  ],

  // Default settings
  defaults: {
    model: 'openrouter-gpt-4',
    temperature: 0.7,
    maxTokens: 150,
    systemPrompt: 'You are a helpful AI assistant.'
  },

  // Error handling
  errorMessages: {
    rateLimited: 'Model is currently rate limited. Please try another model.',
    authFailed: 'Authentication failed. Please refresh the page.',
    networkError: 'Network error. Please check your connection.'
  }
}

export default playgroundConfig
@@ -138,4 +138,3 @@ class TokenManager extends SimpleEmitter {
  }
}

export const tokenManager = new TokenManager()
109 frontend/src/lib/url-utils.ts (Normal file)
@@ -0,0 +1,109 @@
/**
 * URL utilities for handling HTTP/HTTPS protocol detection
 */

/**
 * Get the base URL with proper protocol detection
 * This ensures API calls use the same protocol as the page was loaded with
 */
export const getBaseUrl = (): string => {
  if (typeof window !== 'undefined') {
    // Client-side: detect current protocol
    const protocol = window.location.protocol === 'https:' ? 'https' : 'http'
    const host = process.env.NEXT_PUBLIC_BASE_URL || window.location.hostname
    return `${protocol}://${host}`
  }

  // Server-side: default based on environment
  const protocol = process.env.NODE_ENV === 'production' ? 'https' : 'http'
  return `${protocol}://${process.env.NEXT_PUBLIC_BASE_URL || 'localhost'}`
}

/**
 * Get the API URL with proper protocol detection
 * This is the main function that should be used for all API calls
 */
export const getApiUrl = (): string => {
  if (typeof window !== 'undefined') {
    // Client-side: use the same protocol as the current page
    const protocol = window.location.protocol.slice(0, -1) // Remove ':' from 'https:'
    const host = window.location.hostname
    return `${protocol}://${host}`
  }

  // Server-side: default to HTTP for internal requests
  return `http://${process.env.NEXT_PUBLIC_BASE_URL || 'localhost'}`
}

/**
 * Get the internal API URL for authenticated endpoints
 * This ensures internal API calls use the same protocol as the page
 */
export const getInternalApiUrl = (): string => {
  const baseUrl = getApiUrl()
  return `${baseUrl}/api-internal`
}

/**
 * Get the public API URL for external client endpoints
 * This ensures public API calls use the same protocol as the page
 */
export const getPublicApiUrl = (): string => {
  const baseUrl = getApiUrl()
  return `${baseUrl}/api`
}

/**
 * Helper function to make API calls with proper protocol
 */
export const apiFetch = async (
  endpoint: string,
  options: RequestInit = {}
): Promise<Response> => {
  const baseUrl = getApiUrl()
  const url = `${baseUrl}${endpoint}`

  return fetch(url, {
    ...options,
    headers: {
      'Content-Type': 'application/json',
      ...options.headers,
    },
  })
}

/**
 * Helper function for internal API calls
 */
export const internalApiFetch = async (
  endpoint: string,
  options: RequestInit = {}
): Promise<Response> => {
  const url = `${getInternalApiUrl()}${endpoint}`

  return fetch(url, {
    ...options,
    headers: {
      'Content-Type': 'application/json',
      ...options.headers,
    },
  })
}

/**
 * Helper function for public API calls
 */
export const publicApiFetch = async (
  endpoint: string,
  options: RequestInit = {}
): Promise<Response> => {
  const url = `${getPublicApiUrl()}${endpoint}`

  return fetch(url, {
    ...options,
    headers: {
      'Content-Type': 'application/json',
      ...options.headers,
    },
  })
}
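Note (not part of the diff): a small client-side sketch of the helpers above; internalApiFetch prefixes `/api-internal` and inherits the page protocol. The example.com host is purely illustrative.

import { internalApiFetch, getApiUrl } from '@/lib/url-utils'

// On a page served from https://example.com this resolves to
// https://example.com/api-internal/v1/auth/me
async function fetchCurrentUser(token: string) {
  const res = await internalApiFetch('/v1/auth/me', {
    headers: { Authorization: `Bearer ${token}` },
  })
  return res.ok ? res.json() : null
}

console.log(getApiUrl()) // e.g. "https://example.com" when loaded over HTTPS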
@@ -2,7 +2,7 @@ import { type ClassValue } from 'clsx'
import { clsx } from 'clsx'
import { twMerge } from 'tailwind-merge'


export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs))
}
289 frontend/src/lib/validation.ts (Normal file)
@@ -0,0 +1,289 @@
/**
 * Validation utilities with TypeScript support
 */

import type { ValidationRule, ValidationRules, ValidationResult } from '@/types/chatbot'

/**
 * Validates a single field against its rules
 */
export function validateField<T>(
  value: T,
  rules: ValidationRule<T> = {}
): string | null {
  const {
    required = false,
    minLength,
    maxLength,
    min,
    max,
    pattern,
    custom
  } = rules

  // Required validation
  if (required) {
    if (value === null || value === undefined) {
      return 'This field is required'
    }
    if (typeof value === 'string' && value.trim().length === 0) {
      return 'This field is required'
    }
    if (Array.isArray(value) && value.length === 0) {
      return 'This field is required'
    }
  }

  // Skip other validations if value is empty and not required
  if (!required && (value === null || value === undefined || value === '')) {
    return null
  }

  // String length validation
  if (typeof value === 'string') {
    if (minLength !== undefined && value.length < minLength) {
      return `Must be at least ${minLength} characters`
    }
    if (maxLength !== undefined && value.length > maxLength) {
      return `Must be no more than ${maxLength} characters`
    }
  }

  // Number range validation
  if (typeof value === 'number') {
    if (min !== undefined && value < min) {
      return `Must be at least ${min}`
    }
    if (max !== undefined && value > max) {
      return `Must be no more than ${max}`
    }
  }

  // Array length validation
  if (Array.isArray(value)) {
    if (minLength !== undefined && value.length < minLength) {
      return `Must have at least ${minLength} items`
    }
    if (maxLength !== undefined && value.length > maxLength) {
      return `Must have no more than ${maxLength} items`
    }
  }

  // Pattern validation
  if (typeof value === 'string' && pattern) {
    if (!pattern.test(value)) {
      return 'Invalid format'
    }
  }

  // Custom validation
  if (custom) {
    return custom(value)
  }

  return null
}

/**
 * Validates an entire object against validation rules
 */
export function validateObject<T extends Record<string, any>>(
  obj: T,
  rules: ValidationRules<T>
): ValidationResult {
  const errors: Record<string, string> = {}

  for (const [key, rule] of Object.entries(rules)) {
    if (rule && key in obj) {
      const error = validateField(obj[key], rule as ValidationRule<any>)
      if (error) {
        errors[key] = error
      }
    }
  }

  return {
    isValid: Object.keys(errors).length === 0,
    errors
  }
}

/**
 * Common validation rules for chatbot fields
 */
export const chatbotValidationRules = {
  name: {
    required: true,
    minLength: 1,
    maxLength: 100,
    custom: (value: string) => {
      if (!/^[a-zA-Z0-9\s\-_]+$/.test(value)) {
        return 'Name can only contain letters, numbers, spaces, hyphens, and underscores'
      }
      return null
    }
  },

  model: {
    required: true,
    minLength: 1,
    maxLength: 100
  },

  system_prompt: {
    maxLength: 4000,
    custom: (value: string) => {
      if (value && value.trim().length === 0) {
        return 'System prompt cannot be only whitespace'
      }
      return null
    }
  },

  temperature: {
    required: true,
    min: 0,
    max: 2
  },

  max_tokens: {
    required: true,
    min: 1,
    max: 4000
  },

  memory_length: {
    required: true,
    min: 1,
    max: 50
  },

  rag_top_k: {
    required: true,
    min: 1,
    max: 20
  },

  fallback_responses: {
    minLength: 1,
    maxLength: 10,
    custom: (responses: string[]) => {
      if (responses.some(r => !r || r.trim().length === 0)) {
        return 'All fallback responses must be non-empty'
      }
      return null
    }
  }
} as const

/**
 * Email validation rule
 */
export const emailRule: ValidationRule<string> = {
  pattern: /^[^\s@]+@[^\s@]+\.[^\s@]+$/,
  custom: (value: string) => {
    if (value && !emailRule.pattern?.test(value)) {
      return 'Please enter a valid email address'
    }
    return null
  }
}

/**
 * URL validation rule
 */
export const urlRule: ValidationRule<string> = {
  pattern: /^https?:\/\/.+/,
  custom: (value: string) => {
    if (value && !urlRule.pattern?.test(value)) {
      return 'Please enter a valid URL starting with http:// or https://'
    }
    return null
  }
}

/**
 * Username validation rule
 */
export const usernameRule: ValidationRule<string> = {
  minLength: 3,
  maxLength: 30,
  pattern: /^[a-zA-Z0-9_-]+$/,
  custom: (value: string) => {
    if (value && !usernameRule.pattern?.test(value)) {
      return 'Username can only contain letters, numbers, hyphens, and underscores'
    }
    return null
  }
}

/**
 * Password validation rule
 */
export const passwordRule: ValidationRule<string> = {
  minLength: 8,
  maxLength: 128,
  custom: (value: string) => {
    if (!value) return null

    if (!/(?=.*[a-z])/.test(value)) {
      return 'Password must contain at least one lowercase letter'
    }
    if (!/(?=.*[A-Z])/.test(value)) {
      return 'Password must contain at least one uppercase letter'
    }
    if (!/(?=.*\d)/.test(value)) {
      return 'Password must contain at least one number'
    }
    if (!/(?=.*[!@#$%^&*()_+\-=\[\]{};':"\\|,.<>\?])/.test(value)) {
      return 'Password must contain at least one special character'
    }

    return null
  }
}

/**
 * Utility to create conditional validation rules
 */
export function when<T>(
  condition: (obj: any) => boolean,
  rules: ValidationRule<T>
): ValidationRule<T> {
  return {
    ...rules,
    custom: (value: T, obj?: any) => {
      if (!condition(obj)) {
        return null
      }

      const originalCustom = rules.custom
      if (originalCustom) {
        return originalCustom(value, obj)
      }

      return validateField(value, { ...rules, custom: undefined })
    }
  }
}

/**
 * Debounced validation for real-time form validation
 */
export function createDebouncedValidator<T extends Record<string, any>>(
  rules: ValidationRules<T>,
  delay: number = 300
) {
  let timeoutId: NodeJS.Timeout | null = null

  return (obj: T, callback: (result: ValidationResult) => void) => {
    if (timeoutId) {
      clearTimeout(timeoutId)
    }

    timeoutId = setTimeout(() => {
      const result = validateObject(obj, rules)
      callback(result)
    }, delay)
  }
}
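Note (not part of the diff): a usage sketch of the validators above against a chatbot form object; the field values are illustrative and the defaults mirror the playground config earlier in this commit.

import { validateObject, chatbotValidationRules } from '@/lib/validation'

const draft = {
  name: 'Support Bot',
  model: 'openrouter-gpt-4',
  system_prompt: 'You are a helpful AI assistant.',
  temperature: 0.7,
  max_tokens: 150,
  memory_length: 10,
  rag_top_k: 5,
  fallback_responses: ['Sorry, I do not know that yet.'],
}

const { isValid, errors } = validateObject(draft, chatbotValidationRules)
if (!isValid) {
  console.warn('Chatbot form has errors:', errors) // e.g. { temperature: 'Must be no more than 2' }
}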
1 frontend/tsconfig.tsbuildinfo (Normal file)
File diff suppressed because one or more lines are too long
@@ -13,6 +13,7 @@ http {
    proxy_read_timeout 600;
    send_timeout 600;
+

    upstream backend {
        server enclava-backend:8000;
    }
@@ -7,6 +7,9 @@ http {
        server enclava-backend-test:8000;
    }

+   proxy_connect_timeout 60s;
+   proxy_send_timeout 300s;
+   proxy_read_timeout 300s;
    # Frontend service disabled for simplified testing

    # Logging configuration for tests
@@ -41,10 +44,6 @@ http {
        proxy_buffering off;
        proxy_request_buffering off;

-       # Timeouts for long-running requests
-       proxy_connect_timeout 60s;
-       proxy_send_timeout 60s;
-       proxy_read_timeout 60s;

        # CORS headers for frontend
        add_header 'Access-Control-Allow-Origin' '*' always;
@@ -77,9 +76,7 @@ http {
        proxy_request_buffering off;

        # Timeouts for long-running requests (LLM streaming)
-       proxy_connect_timeout 60s;
-       proxy_send_timeout 300s;
-       proxy_read_timeout 300s;

        # CORS headers for external clients
        add_header 'Access-Control-Allow-Origin' '*' always;
@@ -1,90 +0,0 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# path to migration scripts
script_location = migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be installed by running "pip install alembic[tz]"
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses
# os.pathsep. If this key is omitted entirely, it falls back to the legacy
# behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

@@ -1,719 +0,0 @@
"""
|
|
||||||
Zammad Plugin Implementation
|
|
||||||
Provides integration between Enclava platform and Zammad helpdesk system
|
|
||||||
"""
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
|
|
||||||
from pydantic import BaseModel
|
|
||||||
import aiohttp
|
|
||||||
import asyncio
|
|
||||||
from datetime import datetime, timezone
|
|
||||||
|
|
||||||
from app.services.base_plugin import BasePlugin, PluginContext
|
|
||||||
from app.services.plugin_database import PluginDatabaseSession, plugin_db_manager
|
|
||||||
from app.services.plugin_security import plugin_security_policy_manager
|
|
||||||
from sqlalchemy import Column, String, DateTime, Text, Boolean, Integer, ForeignKey
|
|
||||||
from sqlalchemy.ext.declarative import declarative_base
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from sqlalchemy.dialects.postgresql import UUID
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
|
|
||||||
class ZammadTicket(BaseModel):
|
|
||||||
"""Zammad ticket model"""
|
|
||||||
id: str
|
|
||||||
title: str
|
|
||||||
body: str
|
|
||||||
status: str
|
|
||||||
priority: str
|
|
||||||
customer_id: str
|
|
||||||
group_id: str
|
|
||||||
created_at: datetime
|
|
||||||
updated_at: datetime
|
|
||||||
ai_summary: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class ZammadConfiguration(BaseModel):
|
|
||||||
"""Zammad configuration model"""
|
|
||||||
name: str
|
|
||||||
zammad_url: str
|
|
||||||
api_token: str
|
|
||||||
chatbot_id: str
|
|
||||||
ai_summarization: Dict[str, Any]
|
|
||||||
sync_settings: Dict[str, Any]
|
|
||||||
webhook_settings: Dict[str, Any]
|
|
||||||
|
|
||||||
|
|
||||||
# Plugin database models
|
|
||||||
Base = declarative_base()
|
|
||||||
|
|
||||||
class ZammadConfiguration(Base):
|
|
||||||
__tablename__ = "zammad_configurations"
|
|
||||||
|
|
||||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
|
||||||
user_id = Column(String, nullable=False, index=True)
|
|
||||||
name = Column(String(100), nullable=False)
|
|
||||||
zammad_url = Column(String(500), nullable=False)
|
|
||||||
api_token_encrypted = Column(Text, nullable=False)
|
|
||||||
chatbot_id = Column(String(100), nullable=False)
|
|
||||||
is_active = Column(Boolean, default=True)
|
|
||||||
ai_summarization_enabled = Column(Boolean, default=True)
|
|
||||||
auto_summarize = Column(Boolean, default=True)
|
|
||||||
sync_enabled = Column(Boolean, default=True)
|
|
||||||
sync_interval_hours = Column(Integer, default=2)
|
|
||||||
created_at = Column(DateTime, default=datetime.now(timezone.utc))
|
|
||||||
updated_at = Column(DateTime, default=datetime.now(timezone.utc), onupdate=datetime.now(timezone.utc))
|
|
||||||
|
|
||||||
class ZammadTicket(Base):
|
|
||||||
__tablename__ = "zammad_tickets"
|
|
||||||
|
|
||||||
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
|
||||||
zammad_ticket_id = Column(String(50), nullable=False, index=True)
|
|
||||||
configuration_id = Column(UUID(as_uuid=True), ForeignKey("zammad_configurations.id"))
|
|
||||||
title = Column(String(500), nullable=False)
|
|
||||||
body = Column(Text)
|
|
||||||
status = Column(String(50))
|
|
||||||
priority = Column(String(50))
|
|
||||||
customer_id = Column(String(50))
|
|
||||||
group_id = Column(String(50))
|
|
||||||
ai_summary = Column(Text)
|
|
||||||
last_synced = Column(DateTime, default=datetime.now(timezone.utc))
|
|
||||||
created_at = Column(DateTime, default=datetime.now(timezone.utc))
|
|
||||||
updated_at = Column(DateTime, default=datetime.now(timezone.utc), onupdate=datetime.now(timezone.utc))
|
|
||||||
|
|
||||||
configuration = relationship("ZammadConfiguration", back_populates="tickets")
|
|
||||||
|
|
||||||
ZammadConfiguration.tickets = relationship("ZammadTicket", back_populates="configuration")
|
|
||||||
|
|
||||||
class ZammadPlugin(BasePlugin):
|
|
||||||
"""Zammad helpdesk integration plugin with full framework integration"""
|
|
||||||
|
|
||||||
def __init__(self, manifest, plugin_token: str):
|
|
||||||
super().__init__(manifest, plugin_token)
|
|
||||||
self.zammad_client = None
|
|
||||||
self.db_models = [ZammadConfiguration, ZammadTicket]
|
|
||||||
|
|
||||||
async def initialize(self) -> bool:
|
|
||||||
"""Initialize Zammad plugin with database setup"""
|
|
||||||
try:
|
|
||||||
self.logger.info("Initializing Zammad plugin")
|
|
||||||
|
|
||||||
# Create database tables
|
|
||||||
await self._create_database_tables()
|
|
||||||
|
|
||||||
# Test platform API connectivity
|
|
||||||
health = await self.api_client.get("/health")
|
|
||||||
self.logger.info(f"Platform API health: {health.get('status')}")
|
|
||||||
|
|
||||||
# Validate security policy
|
|
||||||
policy = plugin_security_policy_manager.get_security_policy(self.plugin_id, None)
|
|
||||||
self.logger.info(f"Security policy loaded: {policy.get('max_api_calls_per_minute')} calls/min")
|
|
||||||
|
|
||||||
self.logger.info("Zammad plugin initialized successfully")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to initialize Zammad plugin: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def _create_database_tables(self):
|
|
||||||
"""Create plugin database tables"""
|
|
||||||
try:
|
|
||||||
engine = await plugin_db_manager.get_plugin_engine(self.plugin_id)
|
|
||||||
if engine:
|
|
||||||
async with engine.begin() as conn:
|
|
||||||
await conn.run_sync(Base.metadata.create_all)
|
|
||||||
self.logger.info("Database tables created successfully")
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to create database tables: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
async def cleanup(self) -> bool:
|
|
||||||
"""Cleanup plugin resources"""
|
|
||||||
try:
|
|
||||||
self.logger.info("Cleaning up Zammad plugin")
|
|
||||||
# Close any open connections
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error during cleanup: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_api_router(self) -> APIRouter:
|
|
||||||
"""Return FastAPI router for Zammad endpoints"""
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
@router.get("/health")
|
|
||||||
async def health_check():
|
|
||||||
"""Plugin health check endpoint"""
|
|
||||||
return await self.health_check()
|
|
||||||
|
|
||||||
@router.get("/tickets")
|
|
||||||
async def get_tickets(context: PluginContext = Depends(self.get_auth_context)):
|
|
||||||
"""Get tickets from Zammad"""
|
|
||||||
try:
|
|
||||||
self._track_request()
|
|
||||||
|
|
||||||
config = await self.get_active_config(context.user_id)
|
|
||||||
if not config:
|
|
||||||
raise HTTPException(status_code=404, detail="No Zammad configuration found")
|
|
||||||
|
|
||||||
tickets = await self.fetch_tickets_from_zammad(config)
|
|
||||||
return {"tickets": tickets, "count": len(tickets)}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self._track_request(success=False)
|
|
||||||
self.logger.error(f"Error fetching tickets: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.get("/tickets/{ticket_id}")
|
|
||||||
async def get_ticket(ticket_id: str, context: PluginContext = Depends(self.get_auth_context)):
|
|
||||||
"""Get specific ticket from Zammad"""
|
|
||||||
try:
|
|
||||||
self._track_request()
|
|
||||||
|
|
||||||
config = await self.get_active_config(context.user_id)
|
|
||||||
if not config:
|
|
||||||
raise HTTPException(status_code=404, detail="No Zammad configuration found")
|
|
||||||
|
|
||||||
ticket = await self.fetch_ticket_from_zammad(config, ticket_id)
|
|
||||||
return {"ticket": ticket}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self._track_request(success=False)
|
|
||||||
self.logger.error(f"Error fetching ticket {ticket_id}: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.post("/tickets/{ticket_id}/summarize")
|
|
||||||
async def summarize_ticket(
|
|
||||||
ticket_id: str,
|
|
||||||
background_tasks: BackgroundTasks,
|
|
||||||
context: PluginContext = Depends(self.get_auth_context)
|
|
||||||
):
|
|
||||||
"""Generate AI summary for ticket"""
|
|
||||||
try:
|
|
||||||
self._track_request()
|
|
||||||
|
|
||||||
config = await self.get_active_config(context.user_id)
|
|
||||||
if not config:
|
|
||||||
raise HTTPException(status_code=404, detail="No Zammad configuration found")
|
|
||||||
|
|
||||||
# Start summarization in background
|
|
||||||
background_tasks.add_task(
|
|
||||||
self.summarize_ticket_async,
|
|
||||||
config,
|
|
||||||
ticket_id,
|
|
||||||
context.user_id
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"status": "started",
|
|
||||||
"ticket_id": ticket_id,
|
|
||||||
"message": "AI summarization started in background"
|
|
||||||
}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self._track_request(success=False)
|
|
||||||
self.logger.error(f"Error starting summarization for ticket {ticket_id}: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.post("/webhooks/ticket-created")
|
|
||||||
async def handle_ticket_webhook(webhook_data: Dict[str, Any]):
|
|
||||||
"""Handle Zammad webhook for new tickets"""
|
|
||||||
try:
|
|
||||||
ticket_id = webhook_data.get("ticket", {}).get("id")
|
|
||||||
if not ticket_id:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid webhook data")
|
|
||||||
|
|
||||||
self.logger.info(f"Received webhook for ticket: {ticket_id}")
|
|
||||||
|
|
||||||
# Process webhook asynchronously
|
|
||||||
asyncio.create_task(self.process_ticket_webhook(webhook_data))
|
|
||||||
|
|
||||||
return {"status": "processed", "ticket_id": ticket_id}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error processing webhook: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.get("/configurations")
|
|
||||||
async def get_configurations(context: PluginContext = Depends(self.get_auth_context)):
|
|
||||||
"""Get user's Zammad configurations"""
|
|
||||||
try:
|
|
||||||
configs = await self.get_user_configurations(context.user_id)
|
|
||||||
return {"configurations": configs}
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error fetching configurations: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.post("/configurations")
|
|
||||||
async def create_configuration(
|
|
||||||
config_data: Dict[str, Any],
|
|
||||||
context: PluginContext = Depends(self.get_auth_context)
|
|
||||||
):
|
|
||||||
"""Create new Zammad configuration"""
|
|
||||||
try:
|
|
||||||
# Validate configuration against schema
|
|
||||||
schema = await self.get_configuration_schema()
|
|
||||||
is_valid, errors = await self.config.validate_config(config_data, schema)
|
|
||||||
|
|
||||||
if not is_valid:
|
|
||||||
raise HTTPException(status_code=400, detail=f"Invalid configuration: {errors}")
|
|
||||||
|
|
||||||
# Test connection before saving
|
|
||||||
connection_test = await self.test_zammad_connection(config_data)
|
|
||||||
if not connection_test["success"]:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail=f"Connection test failed: {connection_test['error']}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Save configuration to plugin database
|
|
||||||
success = await self._save_configuration_to_db(config_data, context.user_id)
|
|
||||||
if not success:
|
|
||||||
raise HTTPException(status_code=500, detail="Failed to save configuration")
|
|
||||||
|
|
||||||
return {"status": "created", "config": {"name": config_data.get("name"), "zammad_url": config_data.get("zammad_url")}}
|
|
||||||
|
|
||||||
except HTTPException:
|
|
||||||
raise
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error creating configuration: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.get("/statistics")
|
|
||||||
async def get_statistics(context: PluginContext = Depends(self.get_auth_context)):
|
|
||||||
"""Get plugin usage statistics"""
|
|
||||||
try:
|
|
||||||
stats = await self._get_plugin_statistics(context.user_id)
|
|
||||||
return stats
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error getting statistics: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
@router.get("/tickets/sync")
|
|
||||||
async def sync_tickets_manual(context: PluginContext = Depends(self.get_auth_context)):
|
|
||||||
"""Manually trigger ticket sync"""
|
|
||||||
try:
|
|
||||||
result = await self._sync_user_tickets(context.user_id)
|
|
||||||
return {"status": "completed", "synced_count": result}
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error syncing tickets: {e}")
|
|
||||||
raise HTTPException(status_code=500, detail=str(e))
|
|
||||||
|
|
||||||
return router
|
|
||||||
|
|
||||||
# Plugin-specific methods
|
|
||||||
|
|
||||||
async def get_active_config(self, user_id: str) -> Optional[Dict[str, Any]]:
|
|
||||||
"""Get active Zammad configuration for user from database"""
|
|
||||||
try:
|
|
||||||
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
|
||||||
config = await db.query(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id,
|
|
||||||
ZammadConfiguration.is_active == True
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if config:
|
|
||||||
# Decrypt API token
|
|
||||||
from app.services.plugin_security import plugin_token_manager
|
|
||||||
api_token = plugin_token_manager.decrypt_plugin_secret(config.api_token_encrypted)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"id": str(config.id),
|
|
||||||
"name": config.name,
|
|
||||||
"zammad_url": config.zammad_url,
|
|
||||||
"api_token": api_token,
|
|
||||||
"chatbot_id": config.chatbot_id,
|
|
||||||
"ai_summarization": {
|
|
||||||
"enabled": config.ai_summarization_enabled,
|
|
||||||
"auto_summarize": config.auto_summarize
|
|
||||||
},
|
|
||||||
"sync_settings": {
|
|
||||||
"enabled": config.sync_enabled,
|
|
||||||
"interval_hours": config.sync_interval_hours
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return None
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to get active config: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
async def get_user_configurations(self, user_id: str) -> List[Dict[str, Any]]:
|
|
||||||
"""Get all configurations for user from database"""
|
|
||||||
try:
|
|
||||||
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
|
||||||
configs = await db.query(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id
|
|
||||||
).all()
|
|
||||||
|
|
||||||
result = []
|
|
||||||
for config in configs:
|
|
||||||
result.append({
|
|
||||||
"id": str(config.id),
|
|
||||||
"name": config.name,
|
|
||||||
"zammad_url": config.zammad_url,
|
|
||||||
"chatbot_id": config.chatbot_id,
|
|
||||||
"is_active": config.is_active,
|
|
||||||
"created_at": config.created_at.isoformat(),
|
|
||||||
"updated_at": config.updated_at.isoformat()
|
|
||||||
})
|
|
||||||
|
|
||||||
return result
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to get user configurations: {e}")
|
|
||||||
return []
|
|
||||||
|
|
||||||
async def fetch_tickets_from_zammad(self, config: Dict[str, Any]) -> List[Dict[str, Any]]:
|
|
||||||
"""Fetch tickets from Zammad API"""
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Token {config['api_token']}",
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
}
|
|
||||||
|
|
||||||
async with session.get(
|
|
||||||
f"{config['zammad_url']}/api/v1/tickets",
|
|
||||||
headers=headers,
|
|
||||||
timeout=30
|
|
||||||
) as response:
|
|
||||||
if response.status != 200:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=response.status,
|
|
||||||
detail=f"Zammad API error: {await response.text()}"
|
|
||||||
)
|
|
||||||
|
|
||||||
return await response.json()
|
|
||||||
|
|
||||||
async def fetch_ticket_from_zammad(self, config: Dict[str, Any], ticket_id: str) -> Dict[str, Any]:
|
|
||||||
"""Fetch specific ticket from Zammad"""
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Token {config['api_token']}",
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
}
|
|
||||||
|
|
||||||
async with session.get(
|
|
||||||
f"{config['zammad_url']}/api/v1/tickets/{ticket_id}",
|
|
||||||
headers=headers,
|
|
||||||
timeout=30
|
|
||||||
) as response:
|
|
||||||
if response.status != 200:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=response.status,
|
|
||||||
detail=f"Zammad API error: {await response.text()}"
|
|
||||||
)
|
|
||||||
|
|
||||||
return await response.json()
|
|
||||||
|
|
||||||
async def summarize_ticket_async(self, config: Dict[str, Any], ticket_id: str, user_id: str):
|
|
||||||
"""Asynchronously summarize a ticket using platform AI"""
|
|
||||||
try:
|
|
||||||
# Get ticket details
|
|
||||||
ticket = await self.fetch_ticket_from_zammad(config, ticket_id)
|
|
||||||
|
|
||||||
# Use platform chatbot API for summarization
|
|
||||||
chatbot_response = await self.api_client.call_chatbot_api(
|
|
||||||
chatbot_id=config["chatbot_id"],
|
|
||||||
message=f"Summarize this support ticket:\n\nTitle: {ticket.get('title', '')}\n\nContent: {ticket.get('body', '')}"
|
|
||||||
)
|
|
||||||
|
|
||||||
summary = chatbot_response.get("response", "")
|
|
||||||
|
|
||||||
# TODO: Store summary in database
|
|
||||||
self.logger.info(f"Generated summary for ticket {ticket_id}: {summary[:100]}...")
|
|
||||||
|
|
||||||
# Update ticket in Zammad with summary
|
|
||||||
await self.update_ticket_summary(config, ticket_id, summary)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error summarizing ticket {ticket_id}: {e}")
|
|
||||||
|
|
||||||
async def update_ticket_summary(self, config: Dict[str, Any], ticket_id: str, summary: str):
|
|
||||||
"""Update ticket with AI summary"""
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Token {config['api_token']}",
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
}
|
|
||||||
|
|
||||||
update_data = {
|
|
||||||
"note": f"AI Summary: {summary}"
|
|
||||||
}
|
|
||||||
|
|
||||||
async with session.put(
|
|
||||||
f"{config['zammad_url']}/api/v1/tickets/{ticket_id}",
|
|
||||||
headers=headers,
|
|
||||||
json=update_data,
|
|
||||||
timeout=30
|
|
||||||
) as response:
|
|
||||||
if response.status not in [200, 201]:
|
|
||||||
self.logger.error(f"Failed to update ticket {ticket_id} with summary")
|
|
||||||
|
|
||||||
async def test_zammad_connection(self, config: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Test connection to Zammad instance"""
|
|
||||||
try:
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Token {config['api_token']}",
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
}
|
|
||||||
|
|
||||||
async with session.get(
|
|
||||||
f"{config['zammad_url']}/api/v1/users/me",
|
|
||||||
headers=headers,
|
|
||||||
timeout=10
|
|
||||||
) as response:
|
|
||||||
if response.status == 200:
|
|
||||||
user_data = await response.json()
|
|
||||||
return {
|
|
||||||
"success": True,
|
|
||||||
"user": user_data.get("login", "unknown"),
|
|
||||||
"zammad_version": response.headers.get("X-Zammad-Version", "unknown")
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
return {
|
|
||||||
"success": False,
|
|
||||||
"error": f"HTTP {response.status}: {await response.text()}"
|
|
||||||
}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
return {
|
|
||||||
"success": False,
|
|
||||||
"error": str(e)
|
|
||||||
}
|
|
||||||
|
|
||||||
async def process_ticket_webhook(self, webhook_data: Dict[str, Any]):
|
|
||||||
"""Process ticket webhook asynchronously"""
|
|
||||||
try:
|
|
||||||
ticket_data = webhook_data.get("ticket", {})
|
|
||||||
ticket_id = ticket_data.get("id")
|
|
||||||
|
|
||||||
self.logger.info(f"Processing webhook for ticket {ticket_id}")
|
|
||||||
|
|
||||||
# TODO: Get configuration and auto-summarize if enabled
|
|
||||||
# This would require looking up the configuration associated with the webhook
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Error processing webhook: {e}")
|
|
||||||
|
|
||||||
# Cron job functions
|
|
||||||
|
|
||||||
async def sync_tickets_from_zammad(self) -> bool:
|
|
||||||
"""Sync tickets from Zammad (cron job)"""
|
|
||||||
try:
|
|
||||||
self.logger.info("Starting ticket sync from Zammad")
|
|
||||||
|
|
||||||
# TODO: Get all active configurations and sync tickets
|
|
||||||
# This would iterate through all user configurations
|
|
||||||
|
|
||||||
self.logger.info("Ticket sync completed successfully")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Ticket sync failed: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def cleanup_old_summaries(self) -> bool:
|
|
||||||
"""Clean up old AI summaries (cron job)"""
|
|
||||||
try:
|
|
||||||
self.logger.info("Starting cleanup of old summaries")
|
|
||||||
|
|
||||||
# TODO: Clean up summaries older than retention period
|
|
||||||
|
|
||||||
self.logger.info("Summary cleanup completed")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Summary cleanup failed: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def check_zammad_connection(self) -> bool:
|
|
||||||
"""Check Zammad connectivity (cron job)"""
|
|
||||||
try:
|
|
||||||
# TODO: Test all configured Zammad instances
|
|
||||||
self.logger.info("Zammad connectivity check completed")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Connectivity check failed: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def generate_weekly_reports(self) -> bool:
|
|
||||||
"""Generate weekly reports (cron job)"""
|
|
||||||
try:
|
|
||||||
self.logger.info("Generating weekly reports")
|
|
||||||
|
|
||||||
# TODO: Generate and send weekly ticket reports
|
|
||||||
|
|
||||||
self.logger.info("Weekly reports generated successfully")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Report generation failed: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Enhanced database integration methods
|
|
||||||
|
|
||||||
async def _save_configuration_to_db(self, config_data: Dict[str, Any], user_id: str) -> bool:
|
|
||||||
"""Save Zammad configuration to plugin database"""
|
|
||||||
try:
|
|
||||||
from app.services.plugin_security import plugin_token_manager
|
|
||||||
|
|
||||||
# Encrypt API token
|
|
||||||
encrypted_token = plugin_token_manager.encrypt_plugin_secret(config_data["api_token"])
|
|
||||||
|
|
||||||
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
|
||||||
# Deactivate existing configurations if this is set as active
|
|
||||||
if config_data.get("is_active", True):
|
|
||||||
await db.query(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id,
|
|
||||||
ZammadConfiguration.is_active == True
|
|
||||||
).update({"is_active": False})
|
|
||||||
|
|
||||||
# Create new configuration
|
|
||||||
config = ZammadConfiguration(
|
|
||||||
user_id=user_id,
|
|
||||||
name=config_data["name"],
|
|
||||||
zammad_url=config_data["zammad_url"],
|
|
||||||
api_token_encrypted=encrypted_token,
|
|
||||||
chatbot_id=config_data["chatbot_id"],
|
|
||||||
is_active=config_data.get("is_active", True),
|
|
||||||
ai_summarization_enabled=config_data.get("ai_summarization", {}).get("enabled", True),
|
|
||||||
auto_summarize=config_data.get("ai_summarization", {}).get("auto_summarize", True),
|
|
||||||
sync_enabled=config_data.get("sync_settings", {}).get("enabled", True),
|
|
||||||
sync_interval_hours=config_data.get("sync_settings", {}).get("interval_hours", 2)
|
|
||||||
)
|
|
||||||
|
|
||||||
db.add(config)
|
|
||||||
await db.commit()
|
|
||||||
|
|
||||||
self.logger.info(f"Saved Zammad configuration for user {user_id}")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to save configuration: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def _get_plugin_statistics(self, user_id: str) -> Dict[str, Any]:
|
|
||||||
"""Get plugin usage statistics"""
|
|
||||||
try:
|
|
||||||
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
|
||||||
# Get configuration count
|
|
||||||
config_count = await db.query(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id
|
|
||||||
).count()
|
|
||||||
|
|
||||||
# Get ticket count
|
|
||||||
ticket_count = await db.query(ZammadTicket).join(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id
|
|
||||||
).count()
|
|
||||||
|
|
||||||
# Get tickets with AI summaries
|
|
||||||
summarized_count = await db.query(ZammadTicket).join(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id,
|
|
||||||
ZammadTicket.ai_summary.isnot(None)
|
|
||||||
).count()
|
|
||||||
|
|
||||||
# Get recent activity (last 7 days)
|
|
||||||
from datetime import timedelta
|
|
||||||
week_ago = datetime.now(timezone.utc) - timedelta(days=7)
|
|
||||||
recent_tickets = await db.query(ZammadTicket).join(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.user_id == user_id,
|
|
||||||
ZammadTicket.last_synced >= week_ago
|
|
||||||
).count()
|
|
||||||
|
|
||||||
return {
|
|
||||||
"configurations": config_count,
|
|
||||||
"total_tickets": ticket_count,
|
|
||||||
"tickets_with_summaries": summarized_count,
|
|
||||||
"recent_tickets": recent_tickets,
|
|
||||||
"summary_rate": round((summarized_count / max(ticket_count, 1)) * 100, 1),
|
|
||||||
"last_sync": datetime.now(timezone.utc).isoformat()
|
|
||||||
}
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to get statistics: {e}")
|
|
||||||
return {
|
|
||||||
"error": str(e),
|
|
||||||
"configurations": 0,
|
|
||||||
"total_tickets": 0,
|
|
||||||
"tickets_with_summaries": 0,
|
|
||||||
"recent_tickets": 0,
|
|
||||||
"summary_rate": 0.0
|
|
||||||
}
|
|
||||||
|
|
||||||
async def _sync_user_tickets(self, user_id: str) -> int:
|
|
||||||
"""Sync tickets for a specific user"""
|
|
||||||
try:
|
|
||||||
config = await self.get_active_config(user_id)
|
|
||||||
if not config:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
# Fetch tickets from Zammad
|
|
||||||
tickets = await self.fetch_tickets_from_zammad(config)
|
|
||||||
synced_count = 0
|
|
||||||
|
|
||||||
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
|
||||||
config_record = await db.query(ZammadConfiguration).filter(
|
|
||||||
ZammadConfiguration.id == config["id"]
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if not config_record:
|
|
||||||
return 0
|
|
||||||
|
|
||||||
for ticket_data in tickets:
|
|
||||||
# Check if ticket already exists
|
|
||||||
existing_ticket = await db.query(ZammadTicket).filter(
|
|
||||||
ZammadTicket.zammad_ticket_id == str(ticket_data["id"]),
|
|
||||||
ZammadTicket.configuration_id == config_record.id
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if existing_ticket:
|
|
||||||
# Update existing ticket
|
|
||||||
existing_ticket.title = ticket_data.get("title", "")
|
|
||||||
existing_ticket.body = ticket_data.get("body", "")
|
|
||||||
existing_ticket.status = ticket_data.get("state", "")
|
|
||||||
existing_ticket.priority = ticket_data.get("priority", "")
|
|
||||||
existing_ticket.last_synced = datetime.now(timezone.utc)
|
|
||||||
existing_ticket.updated_at = datetime.now(timezone.utc)
|
|
||||||
else:
|
|
||||||
# Create new ticket
|
|
||||||
new_ticket = ZammadTicket(
|
|
||||||
zammad_ticket_id=str(ticket_data["id"]),
|
|
||||||
configuration_id=config_record.id,
|
|
||||||
title=ticket_data.get("title", ""),
|
|
||||||
body=ticket_data.get("body", ""),
|
|
||||||
status=ticket_data.get("state", ""),
|
|
||||||
priority=ticket_data.get("priority", ""),
|
|
||||||
customer_id=str(ticket_data.get("customer_id", "")),
|
|
||||||
group_id=str(ticket_data.get("group_id", "")),
|
|
||||||
last_synced=datetime.now(timezone.utc)
|
|
||||||
)
|
|
||||||
db.add(new_ticket)
|
|
||||||
synced_count += 1
|
|
||||||
|
|
||||||
await db.commit()
|
|
||||||
self.logger.info(f"Synced {synced_count} new tickets for user {user_id}")
|
|
||||||
return synced_count
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to sync tickets for user {user_id}: {e}")
|
|
||||||
return 0
|
|
||||||
|
|
||||||
async def _store_ticket_summary(self, ticket_id: str, summary: str, config_id: str):
|
|
||||||
"""Store AI-generated summary in database"""
|
|
||||||
try:
|
|
||||||
async with PluginDatabaseSession(self.plugin_id, plugin_db_manager) as db:
|
|
||||||
ticket = await db.query(ZammadTicket).filter(
|
|
||||||
ZammadTicket.zammad_ticket_id == ticket_id,
|
|
||||||
ZammadTicket.configuration_id == config_id
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if ticket:
|
|
||||||
ticket.ai_summary = summary
|
|
||||||
ticket.updated_at = datetime.now(timezone.utc)
|
|
||||||
await db.commit()
|
|
||||||
self.logger.info(f"Stored AI summary for ticket {ticket_id}")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to store summary for ticket {ticket_id}: {e}")
|
|
||||||
@@ -1,253 +0,0 @@
|
|||||||
apiVersion: "v1"
|
|
||||||
kind: "Plugin"
|
|
||||||
metadata:
|
|
||||||
name: "zammad"
|
|
||||||
version: "1.0.0"
|
|
||||||
description: "Zammad helpdesk integration with AI summarization and ticket management"
|
|
||||||
author: "Enclava Team"
|
|
||||||
license: "MIT"
|
|
||||||
homepage: "https://github.com/enclava/plugins/zammad"
|
|
||||||
repository: "https://github.com/enclava/plugins/zammad"
|
|
||||||
tags:
|
|
||||||
- "helpdesk"
|
|
||||||
- "ticket-management"
|
|
||||||
- "ai-summarization"
|
|
||||||
- "integration"
|
|
||||||
|
|
||||||
spec:
|
|
||||||
runtime:
|
|
||||||
python_version: "3.11"
|
|
||||||
dependencies:
|
|
||||||
- "aiohttp>=3.8.0"
|
|
||||||
- "pydantic>=2.0.0"
|
|
||||||
- "httpx>=0.24.0"
|
|
||||||
- "python-dateutil>=2.8.0"
|
|
||||||
environment_variables:
|
|
||||||
ZAMMAD_TIMEOUT: "30"
|
|
||||||
ZAMMAD_MAX_RETRIES: "3"
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
platform_apis:
|
|
||||||
- "chatbot:invoke"
|
|
||||||
- "rag:query"
|
|
||||||
- "llm:completion"
|
|
||||||
- "llm:embeddings"
|
|
||||||
plugin_scopes:
|
|
||||||
- "tickets:read"
|
|
||||||
- "tickets:write"
|
|
||||||
- "tickets:summarize"
|
|
||||||
- "webhooks:receive"
|
|
||||||
- "config:manage"
|
|
||||||
- "sync:execute"
|
|
||||||
external_domains:
|
|
||||||
- "*.zammad.com"
|
|
||||||
- "*.zammad.org"
|
|
||||||
- "api.zammad.org"
|
|
||||||
|
|
||||||
database:
|
|
||||||
schema: "plugin_zammad"
|
|
||||||
migrations_path: "./migrations"
|
|
||||||
auto_migrate: true
|
|
||||||
|
|
||||||
api_endpoints:
|
|
||||||
- path: "/tickets"
|
|
||||||
methods: ["GET", "POST"]
|
|
||||||
description: "List and create Zammad tickets"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/tickets/{ticket_id}"
|
|
||||||
methods: ["GET", "PUT", "DELETE"]
|
|
||||||
description: "Get, update, or delete specific ticket"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/tickets/{ticket_id}/summarize"
|
|
||||||
methods: ["POST"]
|
|
||||||
description: "Generate AI summary for ticket"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/tickets/{ticket_id}/articles"
|
|
||||||
methods: ["GET", "POST"]
|
|
||||||
description: "Get ticket articles or add new article"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/webhooks/ticket-created"
|
|
||||||
methods: ["POST"]
|
|
||||||
description: "Handle Zammad webhook for new tickets"
|
|
||||||
auth_required: false
|
|
||||||
|
|
||||||
- path: "/webhooks/ticket-updated"
|
|
||||||
methods: ["POST"]
|
|
||||||
description: "Handle Zammad webhook for updated tickets"
|
|
||||||
auth_required: false
|
|
||||||
|
|
||||||
- path: "/configurations"
|
|
||||||
methods: ["GET", "POST", "PUT", "DELETE"]
|
|
||||||
description: "Manage Zammad configurations"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/configurations/{config_id}/test"
|
|
||||||
methods: ["POST"]
|
|
||||||
description: "Test Zammad configuration connection"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/statistics"
|
|
||||||
methods: ["GET"]
|
|
||||||
description: "Get plugin usage statistics"
|
|
||||||
auth_required: true
|
|
||||||
|
|
||||||
- path: "/health"
|
|
||||||
methods: ["GET"]
|
|
||||||
description: "Plugin health check"
|
|
||||||
auth_required: false
|
|
||||||
|
|
||||||
cron_jobs:
|
|
||||||
- name: "sync_tickets"
|
|
||||||
schedule: "0 */2 * * *"
|
|
||||||
function: "sync_tickets_from_zammad"
|
|
||||||
description: "Sync tickets from Zammad every 2 hours"
|
|
||||||
enabled: true
|
|
||||||
timeout_seconds: 600
|
|
||||||
max_retries: 3
|
|
||||||
|
|
||||||
- name: "cleanup_summaries"
|
|
||||||
schedule: "0 3 * * 0"
|
|
||||||
function: "cleanup_old_summaries"
|
|
||||||
description: "Clean up old AI summaries weekly"
|
|
||||||
enabled: true
|
|
||||||
timeout_seconds: 300
|
|
||||||
max_retries: 1
|
|
||||||
|
|
||||||
- name: "health_check"
|
|
||||||
schedule: "*/15 * * * *"
|
|
||||||
function: "check_zammad_connection"
|
|
||||||
description: "Check Zammad API connectivity every 15 minutes"
|
|
||||||
enabled: true
|
|
||||||
timeout_seconds: 60
|
|
||||||
max_retries: 2
|
|
||||||
|
|
||||||
- name: "generate_reports"
|
|
||||||
schedule: "0 9 * * 1"
|
|
||||||
function: "generate_weekly_reports"
|
|
||||||
description: "Generate weekly ticket reports"
|
|
||||||
enabled: false
|
|
||||||
timeout_seconds: 900
|
|
||||||
max_retries: 2
|
|
||||||
|
|
||||||
ui_config:
|
|
||||||
configuration_schema: "./config_schema.json"
|
|
||||||
ui_components: "./ui/components"
|
|
||||||
pages:
|
|
||||||
- name: "dashboard"
|
|
||||||
path: "/plugins/zammad"
|
|
||||||
component: "ZammadDashboard"
|
|
||||||
|
|
||||||
- name: "settings"
|
|
||||||
path: "/plugins/zammad/settings"
|
|
||||||
component: "ZammadSettings"
|
|
||||||
|
|
||||||
- name: "tickets"
|
|
||||||
path: "/plugins/zammad/tickets"
|
|
||||||
component: "ZammadTicketList"
|
|
||||||
|
|
||||||
- name: "analytics"
|
|
||||||
path: "/plugins/zammad/analytics"
|
|
||||||
component: "ZammadAnalytics"
|
|
||||||
|
|
||||||
external_services:
|
|
||||||
allowed_domains:
|
|
||||||
- "*.zammad.com"
|
|
||||||
- "*.zammad.org"
|
|
||||||
- "api.zammad.org"
|
|
||||||
- "help.zammad.com"
|
|
||||||
|
|
||||||
webhooks:
|
|
||||||
- endpoint: "/webhooks/ticket-created"
|
|
||||||
security: "signature_validation"
|
|
||||||
|
|
||||||
- endpoint: "/webhooks/ticket-updated"
|
|
||||||
security: "signature_validation"
|
|
||||||
|
|
||||||
rate_limits:
|
|
||||||
"*.zammad.com": 100
|
|
||||||
"*.zammad.org": 100
|
|
||||||
"api.zammad.org": 200
|
|
||||||
|
|
||||||
config_schema:
|
|
||||||
type: "object"
|
|
||||||
required:
|
|
||||||
- "zammad_url"
|
|
||||||
- "api_token"
|
|
||||||
- "chatbot_id"
|
|
||||||
properties:
|
|
||||||
zammad_url:
|
|
||||||
type: "string"
|
|
||||||
format: "uri"
|
|
||||||
title: "Zammad URL"
|
|
||||||
description: "The base URL of your Zammad instance"
|
|
||||||
examples:
|
|
||||||
- "https://company.zammad.com"
|
|
||||||
- "https://support.example.com"
|
|
||||||
|
|
||||||
api_token:
|
|
||||||
type: "string"
|
|
||||||
title: "API Token"
|
|
||||||
description: "Zammad API token with ticket read/write permissions"
|
|
||||||
minLength: 20
|
|
||||||
format: "password"
|
|
||||||
|
|
||||||
chatbot_id:
|
|
||||||
type: "string"
|
|
||||||
title: "Chatbot ID"
|
|
||||||
description: "Platform chatbot ID for AI summarization"
|
|
||||||
examples:
|
|
||||||
- "zammad-summarizer"
|
|
||||||
- "ticket-assistant"
|
|
||||||
|
|
||||||
ai_summarization:
|
|
||||||
type: "object"
|
|
||||||
title: "AI Summarization Settings"
|
|
||||||
properties:
|
|
||||||
enabled:
|
|
||||||
type: "boolean"
|
|
||||||
title: "Enable AI Summarization"
|
|
||||||
description: "Automatically summarize tickets using AI"
|
|
||||||
default: true
|
|
||||||
|
|
||||||
model:
|
|
||||||
type: "string"
|
|
||||||
title: "AI Model"
|
|
||||||
description: "LLM model to use for summarization"
|
|
||||||
default: "gpt-3.5-turbo"
|
|
||||||
|
|
||||||
max_tokens:
|
|
||||||
type: "integer"
|
|
||||||
title: "Max Summary Tokens"
|
|
||||||
description: "Maximum tokens for AI summary"
|
|
||||||
minimum: 50
|
|
||||||
maximum: 500
|
|
||||||
default: 150
|
|
||||||
|
|
||||||
draft_settings:
|
|
||||||
type: "object"
|
|
||||||
title: "AI Draft Settings"
|
|
||||||
properties:
|
|
||||||
enabled:
|
|
||||||
type: "boolean"
|
|
||||||
title: "Enable AI Drafts"
|
|
||||||
description: "Generate AI draft responses for tickets"
|
|
||||||
default: false
|
|
||||||
|
|
||||||
model:
|
|
||||||
type: "string"
|
|
||||||
title: "Draft Model"
|
|
||||||
description: "LLM model to use for draft generation"
|
|
||||||
default: "gpt-3.5-turbo"
|
|
||||||
|
|
||||||
max_tokens:
|
|
||||||
type: "integer"
|
|
||||||
title: "Max Draft Tokens"
|
|
||||||
description: "Maximum tokens for AI draft responses"
|
|
||||||
minimum: 100
|
|
||||||
maximum: 1000
|
|
||||||
default: 300
|
|
||||||
|
|
||||||
@@ -1,85 +0,0 @@
|
|||||||
"""Alembic environment for Zammad plugin"""
|
|
||||||
from logging.config import fileConfig
|
|
||||||
from sqlalchemy import engine_from_config
|
|
||||||
from sqlalchemy import pool
|
|
||||||
from alembic import context
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
# this is the Alembic Config object, which provides
|
|
||||||
# access to the values within the .ini file in use.
|
|
||||||
config = context.config
|
|
||||||
|
|
||||||
# Interpret the config file for Python logging.
|
|
||||||
# This line sets up loggers basically.
|
|
||||||
if config.config_file_name is not None:
|
|
||||||
fileConfig(config.config_file_name)
|
|
||||||
|
|
||||||
# add your model's MetaData object here
|
|
||||||
# for 'autogenerate' support
|
|
||||||
from main import Base
|
|
||||||
target_metadata = Base.metadata
|
|
||||||
|
|
||||||
# other values from the config, defined by the needs of env.py,
|
|
||||||
# can be acquired:
|
|
||||||
# my_important_option = config.get_main_option("my_important_option")
|
|
||||||
# ... etc.
|
|
||||||
|
|
||||||
|
|
||||||
def run_migrations_offline() -> None:
|
|
||||||
"""Run migrations in 'offline' mode.
|
|
||||||
|
|
||||||
This configures the context with just a URL
|
|
||||||
and not an Engine, though an Engine is acceptable
|
|
||||||
here as well. By skipping the Engine creation
|
|
||||||
we don't even need a DBAPI to be available.
|
|
||||||
|
|
||||||
Calls to context.execute() here emit the given string to the
|
|
||||||
script output.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# Get database URL from environment variable
|
|
||||||
url = os.getenv("DATABASE_URL")
|
|
||||||
context.configure(
|
|
||||||
url=url,
|
|
||||||
target_metadata=target_metadata,
|
|
||||||
literal_binds=True,
|
|
||||||
dialect_opts={"paramstyle": "named"},
|
|
||||||
)
|
|
||||||
|
|
||||||
with context.begin_transaction():
|
|
||||||
context.run_migrations()
|
|
||||||
|
|
||||||
|
|
||||||
def run_migrations_online() -> None:
|
|
||||||
"""Run migrations in 'online' mode.
|
|
||||||
|
|
||||||
In this scenario we need to create an Engine
|
|
||||||
and associate a connection with the context.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# Get database URL from environment variable
|
|
||||||
database_url = os.getenv("DATABASE_URL")
|
|
||||||
|
|
||||||
configuration = config.get_section(config.config_ini_section)
|
|
||||||
configuration["sqlalchemy.url"] = database_url
|
|
||||||
|
|
||||||
connectable = engine_from_config(
|
|
||||||
configuration,
|
|
||||||
prefix="sqlalchemy.",
|
|
||||||
poolclass=pool.NullPool,
|
|
||||||
)
|
|
||||||
|
|
||||||
with connectable.connect() as connection:
|
|
||||||
context.configure(
|
|
||||||
connection=connection, target_metadata=target_metadata
|
|
||||||
)
|
|
||||||
|
|
||||||
with context.begin_transaction():
|
|
||||||
context.run_migrations()
|
|
||||||
|
|
||||||
|
|
||||||
if context.is_offline_mode():
|
|
||||||
run_migrations_offline()
|
|
||||||
else:
|
|
||||||
run_migrations_online()
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
"""${message}
|
|
||||||
|
|
||||||
Revision ID: ${up_revision}
|
|
||||||
Revises: ${down_revision | comma,n}
|
|
||||||
Create Date: ${create_date}
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
${imports if imports else ""}
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = ${repr(up_revision)}
|
|
||||||
down_revision = ${repr(down_revision)}
|
|
||||||
branch_labels = ${repr(branch_labels)}
|
|
||||||
depends_on = ${repr(depends_on)}
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
${upgrades if upgrades else "pass"}
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
${downgrades if downgrades else "pass"}
|
|
||||||
@@ -1,112 +0,0 @@
|
|||||||
"""Initial Zammad plugin schema
|
|
||||||
|
|
||||||
Revision ID: 001
|
|
||||||
Revises:
|
|
||||||
Create Date: 2024-12-22 12:00:00.000000
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects.postgresql import UUID
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = '001'
|
|
||||||
down_revision = None
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Create initial Zammad plugin schema"""
|
|
||||||
|
|
||||||
# Create zammad_configurations table
|
|
||||||
op.create_table(
|
|
||||||
'zammad_configurations',
|
|
||||||
sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
|
|
||||||
sa.Column('user_id', sa.String(255), nullable=False),
|
|
||||||
sa.Column('name', sa.String(100), nullable=False),
|
|
||||||
sa.Column('zammad_url', sa.String(500), nullable=False),
|
|
||||||
sa.Column('api_token_encrypted', sa.Text(), nullable=False),
|
|
||||||
sa.Column('chatbot_id', sa.String(100), nullable=False),
|
|
||||||
sa.Column('is_active', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
|
||||||
sa.Column('ai_summarization_enabled', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
|
||||||
sa.Column('auto_summarize', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
|
||||||
sa.Column('sync_enabled', sa.Boolean(), nullable=False, server_default=sa.text('TRUE')),
|
|
||||||
sa.Column('sync_interval_hours', sa.Integer(), nullable=False, server_default=sa.text('2')),
|
|
||||||
sa.Column('created_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
|
||||||
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create zammad_tickets table
|
|
||||||
op.create_table(
|
|
||||||
'zammad_tickets',
|
|
||||||
sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
|
|
||||||
sa.Column('zammad_ticket_id', sa.String(50), nullable=False),
|
|
||||||
sa.Column('configuration_id', UUID(as_uuid=True), nullable=True),
|
|
||||||
sa.Column('title', sa.String(500), nullable=False),
|
|
||||||
sa.Column('body', sa.Text(), nullable=True),
|
|
||||||
sa.Column('status', sa.String(50), nullable=True),
|
|
||||||
sa.Column('priority', sa.String(50), nullable=True),
|
|
||||||
sa.Column('customer_id', sa.String(50), nullable=True),
|
|
||||||
sa.Column('group_id', sa.String(50), nullable=True),
|
|
||||||
sa.Column('ai_summary', sa.Text(), nullable=True),
|
|
||||||
sa.Column('last_synced', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
|
||||||
sa.Column('created_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
|
||||||
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), nullable=False, server_default=sa.text('CURRENT_TIMESTAMP')),
|
|
||||||
sa.ForeignKeyConstraint(['configuration_id'], ['zammad_configurations.id'], ondelete='CASCADE'),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create indexes for performance
|
|
||||||
op.create_index('idx_zammad_configurations_user_id', 'zammad_configurations', ['user_id'])
|
|
||||||
op.create_index('idx_zammad_configurations_user_active', 'zammad_configurations', ['user_id', 'is_active'])
|
|
||||||
|
|
||||||
op.create_index('idx_zammad_tickets_zammad_id', 'zammad_tickets', ['zammad_ticket_id'])
|
|
||||||
op.create_index('idx_zammad_tickets_config_id', 'zammad_tickets', ['configuration_id'])
|
|
||||||
op.create_index('idx_zammad_tickets_status', 'zammad_tickets', ['status'])
|
|
||||||
op.create_index('idx_zammad_tickets_last_synced', 'zammad_tickets', ['last_synced'])
|
|
||||||
|
|
||||||
# Create updated_at trigger function if it doesn't exist
|
|
||||||
op.execute("""
|
|
||||||
CREATE OR REPLACE FUNCTION update_updated_at_column()
|
|
||||||
RETURNS TRIGGER AS $$
|
|
||||||
BEGIN
|
|
||||||
NEW.updated_at = CURRENT_TIMESTAMP;
|
|
||||||
RETURN NEW;
|
|
||||||
END;
|
|
||||||
$$ LANGUAGE 'plpgsql';
|
|
||||||
""")
|
|
||||||
|
|
||||||
# Create triggers to automatically update updated_at columns
|
|
||||||
op.execute("""
|
|
||||||
CREATE TRIGGER update_zammad_configurations_updated_at
|
|
||||||
BEFORE UPDATE ON zammad_configurations
|
|
||||||
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
|
||||||
""")
|
|
||||||
|
|
||||||
op.execute("""
|
|
||||||
CREATE TRIGGER update_zammad_tickets_updated_at
|
|
||||||
BEFORE UPDATE ON zammad_tickets
|
|
||||||
FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();
|
|
||||||
""")
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Drop Zammad plugin schema"""
|
|
||||||
|
|
||||||
# Drop triggers first
|
|
||||||
op.execute("DROP TRIGGER IF EXISTS update_zammad_tickets_updated_at ON zammad_tickets;")
|
|
||||||
op.execute("DROP TRIGGER IF EXISTS update_zammad_configurations_updated_at ON zammad_configurations;")
|
|
||||||
|
|
||||||
# Drop indexes
|
|
||||||
op.drop_index('idx_zammad_tickets_last_synced')
|
|
||||||
op.drop_index('idx_zammad_tickets_status')
|
|
||||||
op.drop_index('idx_zammad_tickets_config_id')
|
|
||||||
op.drop_index('idx_zammad_tickets_zammad_id')
|
|
||||||
op.drop_index('idx_zammad_configurations_user_active')
|
|
||||||
op.drop_index('idx_zammad_configurations_user_id')
|
|
||||||
|
|
||||||
# Drop tables (tickets first due to foreign key)
|
|
||||||
op.drop_table('zammad_tickets')
|
|
||||||
op.drop_table('zammad_configurations')
|
|
||||||
|
|
||||||
# Note: We don't drop the update_updated_at_column function as it might be used by other tables
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
aiohttp>=3.8.0
|
|
||||||
pydantic>=2.0.0
|
|
||||||
httpx>=0.24.0
|
|
||||||
python-dateutil>=2.8.0
|
|
||||||
@@ -1,414 +0,0 @@
|
|||||||
/**
|
|
||||||
* Zammad Plugin Dashboard Component
|
|
||||||
* Main dashboard for Zammad plugin showing tickets, statistics, and quick actions
|
|
||||||
*/
|
|
||||||
import React, { useState, useEffect } from 'react';
|
|
||||||
import {
|
|
||||||
Box,
|
|
||||||
Grid,
|
|
||||||
Card,
|
|
||||||
CardContent,
|
|
||||||
Typography,
|
|
||||||
Button,
|
|
||||||
Chip,
|
|
||||||
Alert,
|
|
||||||
Table,
|
|
||||||
TableBody,
|
|
||||||
TableCell,
|
|
||||||
TableHead,
|
|
||||||
TableRow,
|
|
||||||
IconButton,
|
|
||||||
Dialog,
|
|
||||||
DialogTitle,
|
|
||||||
DialogContent,
|
|
||||||
DialogActions,
|
|
||||||
LinearProgress,
|
|
||||||
Tooltip
|
|
||||||
} from '@mui/material';
|
|
||||||
import {
|
|
||||||
Refresh as RefreshIcon,
|
|
||||||
Sync as SyncIcon,
|
|
||||||
Analytics as AnalyticsIcon,
|
|
||||||
Assignment as TicketIcon,
|
|
||||||
AutoAwesome as AIIcon,
|
|
||||||
Settings as SettingsIcon,
|
|
||||||
OpenInNew as OpenIcon
|
|
||||||
} from '@mui/icons-material';
|
|
||||||
|
|
||||||
interface ZammadTicket {
|
|
||||||
id: string;
|
|
||||||
title: string;
|
|
||||||
status: string;
|
|
||||||
priority: string;
|
|
||||||
customer_id: string;
|
|
||||||
created_at: string;
|
|
||||||
ai_summary?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ZammadStats {
|
|
||||||
configurations: number;
|
|
||||||
total_tickets: number;
|
|
||||||
tickets_with_summaries: number;
|
|
||||||
recent_tickets: number;
|
|
||||||
summary_rate: number;
|
|
||||||
last_sync: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const ZammadDashboard: React.FC = () => {
|
|
||||||
const [tickets, setTickets] = useState<ZammadTicket[]>([]);
|
|
||||||
const [stats, setStats] = useState<ZammadStats | null>(null);
|
|
||||||
const [loading, setLoading] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
const [selectedTicket, setSelectedTicket] = useState<ZammadTicket | null>(null);
|
|
||||||
const [dialogOpen, setDialogOpen] = useState(false);
|
|
||||||
const [syncing, setSyncing] = useState(false);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
loadDashboardData();
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const loadDashboardData = async () => {
|
|
||||||
setLoading(true);
|
|
||||||
setError(null);
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Load statistics
|
|
||||||
const statsResponse = await fetch('/api/v1/plugins/zammad/statistics');
|
|
||||||
if (statsResponse.ok) {
|
|
||||||
const statsData = await statsResponse.json();
|
|
||||||
setStats(statsData);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Load recent tickets
|
|
||||||
const ticketsResponse = await fetch('/api/v1/plugins/zammad/tickets?limit=10');
|
|
||||||
if (ticketsResponse.ok) {
|
|
||||||
const ticketsData = await ticketsResponse.json();
|
|
||||||
setTickets(ticketsData.tickets || []);
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (err) {
|
|
||||||
setError('Failed to load dashboard data');
|
|
||||||
console.error('Dashboard load error:', err);
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleSyncTickets = async () => {
|
|
||||||
setSyncing(true);
|
|
||||||
try {
|
|
||||||
const response = await fetch('/api/v1/plugins/zammad/tickets/sync', {
|
|
||||||
method: 'GET'
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
const result = await response.json();
|
|
||||||
// Reload dashboard data after sync
|
|
||||||
await loadDashboardData();
|
|
||||||
// Show success message with sync count
|
|
||||||
console.log(`Synced ${result.synced_count} tickets`);
|
|
||||||
} else {
|
|
||||||
throw new Error('Sync failed');
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
setError('Failed to sync tickets');
|
|
||||||
} finally {
|
|
||||||
setSyncing(false);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleTicketClick = (ticket: ZammadTicket) => {
|
|
||||||
setSelectedTicket(ticket);
|
|
||||||
setDialogOpen(true);
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleSummarizeTicket = async (ticketId: string) => {
|
|
||||||
try {
|
|
||||||
const response = await fetch(`/api/v1/plugins/zammad/tickets/${ticketId}/summarize`, {
|
|
||||||
method: 'POST'
|
|
||||||
});
|
|
||||||
|
|
||||||
if (response.ok) {
|
|
||||||
// Show success message
|
|
||||||
console.log('Summarization started');
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error('Summarization failed:', err);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getStatusColor = (status: string) => {
|
|
||||||
switch (status.toLowerCase()) {
|
|
||||||
case 'open': return 'error';
|
|
||||||
case 'pending': return 'warning';
|
|
||||||
case 'closed': return 'success';
|
|
||||||
default: return 'default';
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getPriorityColor = (priority: string) => {
|
|
||||||
switch (priority) {
|
|
||||||
case '3 high': return 'error';
|
|
||||||
case '2 normal': return 'warning';
|
|
||||||
case '1 low': return 'success';
|
|
||||||
default: return 'default';
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
return (
|
|
||||||
<Box>
|
|
||||||
{/* Header */}
|
|
||||||
<Box display="flex" justifyContent="space-between" alignItems="center" mb={3}>
|
|
||||||
<Typography variant="h4" component="h1">
|
|
||||||
Zammad Dashboard
|
|
||||||
</Typography>
|
|
||||||
|
|
||||||
<Box display="flex" gap={2}>
|
|
||||||
<Button
|
|
||||||
variant="outlined"
|
|
||||||
startIcon={<SyncIcon />}
|
|
||||||
onClick={handleSyncTickets}
|
|
||||||
disabled={syncing}
|
|
||||||
>
|
|
||||||
{syncing ? 'Syncing...' : 'Sync Tickets'}
|
|
||||||
</Button>
|
|
||||||
|
|
||||||
<Button
|
|
||||||
variant="outlined"
|
|
||||||
startIcon={<RefreshIcon />}
|
|
||||||
onClick={loadDashboardData}
|
|
||||||
disabled={loading}
|
|
||||||
>
|
|
||||||
Refresh
|
|
||||||
</Button>
|
|
||||||
</Box>
|
|
||||||
</Box>
|
|
||||||
|
|
||||||
{error && (
|
|
||||||
<Alert severity="error" sx={{ mb: 3 }}>
|
|
||||||
{error}
|
|
||||||
</Alert>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{loading && <LinearProgress sx={{ mb: 3 }} />}
|
|
||||||
|
|
||||||
{/* Statistics Cards */}
|
|
||||||
{stats && (
|
|
||||||
<Grid container spacing={3} sx={{ mb: 4 }}>
|
|
||||||
<Grid item xs={12} sm={6} md={3}>
|
|
||||||
<Card>
|
|
||||||
<CardContent>
|
|
||||||
<Box display="flex" alignItems="center" gap={2}>
|
|
||||||
<TicketIcon color="primary" />
|
|
||||||
<Box>
|
|
||||||
<Typography variant="h6">{stats.total_tickets}</Typography>
|
|
||||||
<Typography variant="body2" color="text.secondary">
|
|
||||||
Total Tickets
|
|
||||||
</Typography>
|
|
||||||
</Box>
|
|
||||||
</Box>
|
|
||||||
</CardContent>
|
|
||||||
</Card>
|
|
||||||
</Grid>
|
|
||||||
|
|
||||||
<Grid item xs={12} sm={6} md={3}>
|
|
||||||
<Card>
|
|
||||||
<CardContent>
|
|
||||||
<Box display="flex" alignItems="center" gap={2}>
|
|
||||||
<AIIcon color="secondary" />
|
|
||||||
<Box>
|
|
||||||
<Typography variant="h6">{stats.tickets_with_summaries}</Typography>
|
|
||||||
<Typography variant="body2" color="text.secondary">
|
|
||||||
AI Summaries
|
|
||||||
</Typography>
|
|
||||||
</Box>
|
|
||||||
</Box>
|
|
||||||
</CardContent>
|
|
||||||
</Card>
|
|
||||||
</Grid>
|
|
||||||
|
|
||||||
<Grid item xs={12} sm={6} md={3}>
|
|
||||||
<Card>
|
|
||||||
<CardContent>
|
|
||||||
<Box display="flex" alignItems="center" gap={2}>
|
|
||||||
<AnalyticsIcon color="success" />
|
|
||||||
<Box>
|
|
||||||
<Typography variant="h6">{stats.summary_rate}%</Typography>
|
|
||||||
<Typography variant="body2" color="text.secondary">
|
|
||||||
Summary Rate
|
|
||||||
</Typography>
|
|
||||||
</Box>
|
|
||||||
</Box>
|
|
||||||
</CardContent>
|
|
||||||
</Card>
|
|
||||||
</Grid>
|
|
||||||
|
|
||||||
<Grid item xs={12} sm={6} md={3}>
|
|
||||||
<Card>
|
|
||||||
<CardContent>
|
|
||||||
<Box display="flex" alignItems="center" gap={2}>
|
|
||||||
<RefreshIcon color="info" />
|
|
||||||
<Box>
|
|
||||||
<Typography variant="h6">{stats.recent_tickets}</Typography>
|
|
||||||
<Typography variant="body2" color="text.secondary">
|
|
||||||
Recent (7 days)
|
|
||||||
</Typography>
|
|
||||||
</Box>
|
|
||||||
</Box>
|
|
||||||
</CardContent>
|
|
||||||
</Card>
|
|
||||||
</Grid>
|
|
||||||
</Grid>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Recent Tickets Table */}
|
|
||||||
<Card>
|
|
||||||
<CardContent>
|
|
||||||
<Box display="flex" justifyContent="space-between" alignItems="center" mb={2}>
|
|
||||||
<Typography variant="h6">Recent Tickets</Typography>
|
|
||||||
<Button
|
|
||||||
size="small"
|
|
||||||
endIcon={<OpenIcon />}
|
|
||||||
onClick={() => window.location.hash = '#/plugins/zammad/tickets'}
|
|
||||||
>
|
|
||||||
View All
|
|
||||||
</Button>
|
|
||||||
</Box>
|
|
||||||
|
|
||||||
{tickets.length === 0 ? (
|
|
||||||
<Typography variant="body2" color="text.secondary" textAlign="center" py={4}>
|
|
||||||
No tickets found. Try syncing with Zammad.
|
|
||||||
</Typography>
|
|
||||||
) : (
|
|
||||||
<Table>
|
|
||||||
<TableHead>
|
|
||||||
<TableRow>
|
|
||||||
<TableCell>Title</TableCell>
|
|
||||||
<TableCell>Status</TableCell>
|
|
||||||
<TableCell>Priority</TableCell>
|
|
||||||
<TableCell>AI Summary</TableCell>
|
|
||||||
<TableCell>Actions</TableCell>
|
|
||||||
</TableRow>
|
|
||||||
</TableHead>
|
|
||||||
<TableBody>
|
|
||||||
{tickets.map((ticket) => (
|
|
||||||
<TableRow key={ticket.id} hover onClick={() => handleTicketClick(ticket)}>
|
|
||||||
<TableCell>
|
|
||||||
<Typography variant="body2" noWrap sx={{ maxWidth: 200 }}>
|
|
||||||
{ticket.title}
|
|
||||||
</Typography>
|
|
||||||
</TableCell>
|
|
||||||
<TableCell>
|
|
||||||
<Chip
|
|
||||||
label={ticket.status}
|
|
||||||
color={getStatusColor(ticket.status) as any}
|
|
||||||
size="small"
|
|
||||||
/>
|
|
||||||
</TableCell>
|
|
||||||
<TableCell>
|
|
||||||
<Chip
|
|
||||||
label={ticket.priority}
|
|
||||||
color={getPriorityColor(ticket.priority) as any}
|
|
||||||
size="small"
|
|
||||||
variant="outlined"
|
|
||||||
/>
|
|
||||||
</TableCell>
|
|
||||||
<TableCell>
|
|
||||||
{ticket.ai_summary ? (
|
|
||||||
<Chip label="Available" color="success" size="small" />
|
|
||||||
) : (
|
|
||||||
<Chip label="None" color="default" size="small" />
|
|
||||||
)}
|
|
||||||
</TableCell>
|
|
||||||
<TableCell>
|
|
||||||
<Tooltip title="Generate AI Summary">
|
|
||||||
<IconButton
|
|
||||||
size="small"
|
|
||||||
onClick={(e) => {
|
|
||||||
e.stopPropagation();
|
|
||||||
handleSummarizeTicket(ticket.id);
|
|
||||||
}}
|
|
||||||
disabled={!!ticket.ai_summary}
|
|
||||||
>
|
|
||||||
<AIIcon />
|
|
||||||
</IconButton>
|
|
||||||
</Tooltip>
|
|
||||||
</TableCell>
|
|
||||||
</TableRow>
|
|
||||||
))}
|
|
||||||
</TableBody>
|
|
||||||
</Table>
|
|
||||||
)}
|
|
||||||
</CardContent>
|
|
||||||
</Card>
|
|
||||||
|
|
||||||
{/* Ticket Detail Dialog */}
|
|
||||||
<Dialog
|
|
||||||
open={dialogOpen}
|
|
||||||
onClose={() => setDialogOpen(false)}
|
|
||||||
maxWidth="md"
|
|
||||||
fullWidth
|
|
||||||
>
|
|
||||||
<DialogTitle>
|
|
||||||
Ticket Details
|
|
||||||
</DialogTitle>
|
|
||||||
|
|
||||||
<DialogContent>
|
|
||||||
{selectedTicket && (
|
|
||||||
<Box>
|
|
||||||
<Typography variant="h6" gutterBottom>
|
|
||||||
{selectedTicket.title}
|
|
||||||
</Typography>
|
|
||||||
|
|
||||||
<Box display="flex" gap={2} mb={2}>
|
|
||||||
<Chip label={selectedTicket.status} color={getStatusColor(selectedTicket.status) as any} />
|
|
||||||
<Chip label={selectedTicket.priority} color={getPriorityColor(selectedTicket.priority) as any} />
|
|
||||||
</Box>
|
|
||||||
|
|
||||||
<Typography variant="body2" color="text.secondary" paragraph>
|
|
||||||
Customer: {selectedTicket.customer_id}
|
|
||||||
</Typography>
|
|
||||||
|
|
||||||
<Typography variant="body2" color="text.secondary" paragraph>
|
|
||||||
Created: {new Date(selectedTicket.created_at).toLocaleString()}
|
|
||||||
</Typography>
|
|
||||||
|
|
||||||
{selectedTicket.ai_summary && (
|
|
||||||
<Box mt={2}>
|
|
||||||
<Typography variant="subtitle2" gutterBottom>
|
|
||||||
AI Summary
|
|
||||||
</Typography>
|
|
||||||
<Typography variant="body2" sx={{
|
|
||||||
backgroundColor: 'grey.100',
|
|
||||||
p: 2,
|
|
||||||
borderRadius: 1
|
|
||||||
}}>
|
|
||||||
{selectedTicket.ai_summary}
|
|
||||||
</Typography>
|
|
||||||
</Box>
|
|
||||||
)}
|
|
||||||
</Box>
|
|
||||||
)}
|
|
||||||
</DialogContent>
|
|
||||||
|
|
||||||
<DialogActions>
|
|
||||||
<Button onClick={() => setDialogOpen(false)}>
|
|
||||||
Close
|
|
||||||
</Button>
|
|
||||||
{selectedTicket && !selectedTicket.ai_summary && (
|
|
||||||
<Button
|
|
||||||
variant="contained"
|
|
||||||
startIcon={<AIIcon />}
|
|
||||||
onClick={() => {
|
|
||||||
handleSummarizeTicket(selectedTicket.id);
|
|
||||||
setDialogOpen(false);
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
Generate Summary
|
|
||||||
</Button>
|
|
||||||
)}
|
|
||||||
</DialogActions>
|
|
||||||
</Dialog>
|
|
||||||
</Box>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
@@ -1,512 +0,0 @@
|
|||||||
/**
 * Zammad Plugin Settings Component
 * Configuration interface for the Zammad plugin
 */
import React, { useState, useEffect } from 'react';
import {
  Box,
  Card,
  CardContent,
  Typography,
  TextField,
  Button,
  Switch,
  FormControlLabel,
  FormGroup,
  Select,
  MenuItem,
  FormControl,
  InputLabel,
  Alert,
  Divider,
  Accordion,
  AccordionSummary,
  AccordionDetails,
  Chip,
  LinearProgress
} from '@mui/material';
import {
  ExpandMore as ExpandMoreIcon,
  Save as SaveIcon,
  // @mui/icons-material exports no "TestTube" icon; Science stands in for the connection-test action
  Science as TestIcon,
  Security as SecurityIcon,
  Sync as SyncIcon,
  // Likewise there is no "Smart" icon; SmartToy is used for the AI summarization action
  SmartToy as AIIcon
} from '@mui/icons-material';

interface ZammadConfig {
  name: string;
  zammad_url: string;
  api_token: string;
  chatbot_id: string;
  ai_summarization: {
    enabled: boolean;
    model: string;
    max_tokens: number;
    auto_summarize: boolean;
  };
  sync_settings: {
    enabled: boolean;
    interval_hours: number;
    sync_articles: boolean;
    max_tickets_per_sync: number;
  };
  webhook_settings: {
    secret: string;
    enabled_events: string[];
  };
  notification_settings: {
    email_notifications: boolean;
    slack_webhook_url: string;
    notification_events: string[];
  };
}

const defaultConfig: ZammadConfig = {
  name: '',
  zammad_url: '',
  api_token: '',
  chatbot_id: '',
  ai_summarization: {
    enabled: true,
    model: 'gpt-3.5-turbo',
    max_tokens: 150,
    auto_summarize: true
  },
  sync_settings: {
    enabled: true,
    interval_hours: 2,
    sync_articles: true,
    max_tickets_per_sync: 100
  },
  webhook_settings: {
    secret: '',
    enabled_events: ['ticket.create', 'ticket.update']
  },
  notification_settings: {
    email_notifications: false,
    slack_webhook_url: '',
    notification_events: ['sync_error', 'api_error']
  }
};

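// Illustrative note (not part of the original source): a populated ZammadConfig is serialized
// as-is and POSTed to /api/v1/plugins/zammad/configurations by handleSaveConfiguration below, e.g.
//   { ...defaultConfig, name: 'Helpdesk', zammad_url: 'https://company.zammad.com',
//     api_token: '<zammad-api-token>', chatbot_id: '<platform-chatbot-id>' }
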
export const ZammadSettings: React.FC = () => {
  const [config, setConfig] = useState<ZammadConfig>(defaultConfig);
  const [loading, setLoading] = useState(false);
  const [saving, setSaving] = useState(false);
  const [testing, setTesting] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [success, setSuccess] = useState<string | null>(null);
  const [testResult, setTestResult] = useState<any>(null);

  useEffect(() => {
    loadConfiguration();
  }, []);

  const loadConfiguration = async () => {
    setLoading(true);
    try {
      const response = await fetch('/api/v1/plugins/zammad/configurations');
      if (response.ok) {
        const data = await response.json();
        if (data.configurations.length > 0) {
          // Load the first (active) configuration
          const loadedConfig = data.configurations[0];
          setConfig({
            ...defaultConfig,
            ...loadedConfig
          });
        }
      }
    } catch (err) {
      setError('Failed to load configuration');
    } finally {
      setLoading(false);
    }
  };

  const handleConfigChange = (path: string, value: any) => {
    setConfig(prev => {
      // Walk the dot-separated path, copying each nested object so the previous
      // state is never mutated (a shallow top-level copy alone would share nested refs).
      const newConfig = { ...prev };
      const keys = path.split('.');
      let current: any = newConfig;

      for (let i = 0; i < keys.length - 1; i++) {
        current[keys[i]] = { ...current[keys[i]] };
        current = current[keys[i]];
      }

      current[keys[keys.length - 1]] = value;
      return newConfig;
    });
  };

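  // Example usage (illustrative only; paths and values taken from defaultConfig above):
  //   handleConfigChange('ai_summarization.max_tokens', 200);
  //   handleConfigChange('sync_settings.enabled', false);
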
  const handleTestConnection = async () => {
    setTesting(true);
    setTestResult(null);
    setError(null);

    try {
      const response = await fetch('/api/v1/plugins/zammad/configurations/test', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify({
          zammad_url: config.zammad_url,
          api_token: config.api_token
        })
      });

      const result = await response.json();
      setTestResult(result);

      if (!result.success) {
        setError(`Connection test failed: ${result.error}`);
      }
    } catch (err) {
      setError('Connection test failed');
    } finally {
      setTesting(false);
    }
  };

  const handleSaveConfiguration = async () => {
    setSaving(true);
    setError(null);
    setSuccess(null);

    try {
      const response = await fetch('/api/v1/plugins/zammad/configurations', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(config)
      });

      if (response.ok) {
        setSuccess('Configuration saved successfully');
      } else {
        const errorData = await response.json();
        setError(errorData.detail || 'Failed to save configuration');
      }
    } catch (err) {
      setError('Failed to save configuration');
    } finally {
      setSaving(false);
    }
  };

  const handleArrayToggle = (path: string, value: string) => {
    // Resolve the current array at the dot path, then add or remove the value.
    const currentArray = path.split('.').reduce((obj: any, key) => obj[key], config) as string[];
    const newArray = currentArray.includes(value)
      ? currentArray.filter(item => item !== value)
      : [...currentArray, value];
    handleConfigChange(path, newArray);
  };

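  // Example usage (illustrative only): toggling 'ticket.close' in the webhook event list
  //   handleArrayToggle('webhook_settings.enabled_events', 'ticket.close');
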
  if (loading) {
    return (
      <Box>
        <Typography variant="h4" gutterBottom>Zammad Settings</Typography>
        <LinearProgress />
      </Box>
    );
  }

  return (
    <Box>
      <Box display="flex" justifyContent="space-between" alignItems="center" mb={3}>
        <Typography variant="h4" component="h1">
          Zammad Settings
        </Typography>

        <Box display="flex" gap={2}>
          <Button
            variant="outlined"
            startIcon={<TestIcon />}
            onClick={handleTestConnection}
            disabled={testing || !config.zammad_url || !config.api_token}
          >
            {testing ? 'Testing...' : 'Test Connection'}
          </Button>

          <Button
            variant="contained"
            startIcon={<SaveIcon />}
            onClick={handleSaveConfiguration}
            disabled={saving}
          >
            {saving ? 'Saving...' : 'Save Configuration'}
          </Button>
        </Box>
      </Box>

      {error && (
        <Alert severity="error" sx={{ mb: 3 }}>
          {error}
        </Alert>
      )}

      {success && (
        <Alert severity="success" sx={{ mb: 3 }}>
          {success}
        </Alert>
      )}

      {testResult && (
        <Alert
          severity={testResult.success ? 'success' : 'error'}
          sx={{ mb: 3 }}
        >
          {testResult.success
            ? `Connection successful! User: ${testResult.user}, Version: ${testResult.zammad_version}`
            : `Connection failed: ${testResult.error}`
          }
        </Alert>
      )}

      {/* Basic Configuration */}
      <Card sx={{ mb: 3 }}>
        <CardContent>
          <Typography variant="h6" gutterBottom>
            Basic Configuration
          </Typography>

          <Box display="flex" flexDirection="column" gap={3}>
            <TextField
              label="Configuration Name"
              value={config.name}
              onChange={(e) => handleConfigChange('name', e.target.value)}
              fullWidth
              required
            />

            <TextField
              label="Zammad URL"
              value={config.zammad_url}
              onChange={(e) => handleConfigChange('zammad_url', e.target.value)}
              fullWidth
              required
              placeholder="https://company.zammad.com"
            />

            <TextField
              label="API Token"
              type="password"
              value={config.api_token}
              onChange={(e) => handleConfigChange('api_token', e.target.value)}
              fullWidth
              required
              helperText="Zammad API token with ticket read/write permissions"
            />

            <TextField
              label="Chatbot ID"
              value={config.chatbot_id}
              onChange={(e) => handleConfigChange('chatbot_id', e.target.value)}
              fullWidth
              required
              helperText="Platform chatbot ID for AI summarization"
            />
          </Box>
        </CardContent>
      </Card>

      {/* AI Summarization Settings */}
      <Accordion sx={{ mb: 2 }}>
        <AccordionSummary expandIcon={<ExpandMoreIcon />}>
          <Box display="flex" alignItems="center" gap={2}>
            <AIIcon />
            <Typography variant="h6">AI Summarization</Typography>
            <Chip
              label={config.ai_summarization.enabled ? 'Enabled' : 'Disabled'}
              color={config.ai_summarization.enabled ? 'success' : 'default'}
              size="small"
            />
          </Box>
        </AccordionSummary>
        <AccordionDetails>
          <Box display="flex" flexDirection="column" gap={3}>
            <FormControlLabel
              control={
                <Switch
                  checked={config.ai_summarization.enabled}
                  onChange={(e) => handleConfigChange('ai_summarization.enabled', e.target.checked)}
                />
              }
              label="Enable AI Summarization"
            />

            <FormControl fullWidth>
              <InputLabel>AI Model</InputLabel>
              <Select
                value={config.ai_summarization.model}
                onChange={(e) => handleConfigChange('ai_summarization.model', e.target.value)}
                label="AI Model"
              >
                <MenuItem value="gpt-3.5-turbo">GPT-3.5 Turbo</MenuItem>
                <MenuItem value="gpt-4">GPT-4</MenuItem>
                <MenuItem value="claude-3-sonnet">Claude 3 Sonnet</MenuItem>
              </Select>
            </FormControl>

            <TextField
              label="Max Summary Tokens"
              type="number"
              value={config.ai_summarization.max_tokens}
              onChange={(e) => handleConfigChange('ai_summarization.max_tokens', parseInt(e.target.value))}
              inputProps={{ min: 50, max: 500 }}
            />

            <FormControlLabel
              control={
                <Switch
                  checked={config.ai_summarization.auto_summarize}
                  onChange={(e) => handleConfigChange('ai_summarization.auto_summarize', e.target.checked)}
                />
              }
              label="Auto-summarize New Tickets"
            />
          </Box>
        </AccordionDetails>
      </Accordion>

      {/* Sync Settings */}
      <Accordion sx={{ mb: 2 }}>
        <AccordionSummary expandIcon={<ExpandMoreIcon />}>
          <Box display="flex" alignItems="center" gap={2}>
            <SyncIcon />
            <Typography variant="h6">Sync Settings</Typography>
            <Chip
              label={config.sync_settings.enabled ? 'Enabled' : 'Disabled'}
              color={config.sync_settings.enabled ? 'success' : 'default'}
              size="small"
            />
          </Box>
        </AccordionSummary>
        <AccordionDetails>
          <Box display="flex" flexDirection="column" gap={3}>
            <FormControlLabel
              control={
                <Switch
                  checked={config.sync_settings.enabled}
                  onChange={(e) => handleConfigChange('sync_settings.enabled', e.target.checked)}
                />
              }
              label="Enable Automatic Sync"
            />

            <TextField
              label="Sync Interval (Hours)"
              type="number"
              value={config.sync_settings.interval_hours}
              onChange={(e) => handleConfigChange('sync_settings.interval_hours', parseInt(e.target.value))}
              inputProps={{ min: 1, max: 24 }}
            />

            <FormControlLabel
              control={
                <Switch
                  checked={config.sync_settings.sync_articles}
                  onChange={(e) => handleConfigChange('sync_settings.sync_articles', e.target.checked)}
                />
              }
              label="Sync Ticket Articles"
            />

            <TextField
              label="Max Tickets Per Sync"
              type="number"
              value={config.sync_settings.max_tickets_per_sync}
              onChange={(e) => handleConfigChange('sync_settings.max_tickets_per_sync', parseInt(e.target.value))}
              inputProps={{ min: 10, max: 1000 }}
            />
          </Box>
        </AccordionDetails>
      </Accordion>

      {/* Webhook Settings */}
      <Accordion sx={{ mb: 2 }}>
        <AccordionSummary expandIcon={<ExpandMoreIcon />}>
          <Box display="flex" alignItems="center" gap={2}>
            <SecurityIcon />
            <Typography variant="h6">Webhook Settings</Typography>
          </Box>
        </AccordionSummary>
        <AccordionDetails>
          <Box display="flex" flexDirection="column" gap={3}>
            <TextField
              label="Webhook Secret"
              type="password"
              value={config.webhook_settings.secret}
              onChange={(e) => handleConfigChange('webhook_settings.secret', e.target.value)}
              fullWidth
              helperText="Secret for webhook signature validation"
            />

            <Typography variant="subtitle2">Enabled Webhook Events</Typography>
            <FormGroup>
              {['ticket.create', 'ticket.update', 'ticket.close', 'article.create'].map((event) => (
                <FormControlLabel
                  key={event}
                  control={
                    <Switch
                      checked={config.webhook_settings.enabled_events.includes(event)}
                      onChange={() => handleArrayToggle('webhook_settings.enabled_events', event)}
                    />
                  }
                  label={event}
                />
              ))}
            </FormGroup>
          </Box>
        </AccordionDetails>
      </Accordion>

      {/* Notification Settings */}
      <Accordion>
        <AccordionSummary expandIcon={<ExpandMoreIcon />}>
          <Typography variant="h6">Notification Settings</Typography>
        </AccordionSummary>
        <AccordionDetails>
          <Box display="flex" flexDirection="column" gap={3}>
            <FormControlLabel
              control={
                <Switch
                  checked={config.notification_settings.email_notifications}
                  onChange={(e) => handleConfigChange('notification_settings.email_notifications', e.target.checked)}
                />
              }
              label="Email Notifications"
            />

            <TextField
              label="Slack Webhook URL"
              value={config.notification_settings.slack_webhook_url}
              onChange={(e) => handleConfigChange('notification_settings.slack_webhook_url', e.target.value)}
              fullWidth
              placeholder="https://hooks.slack.com/services/..."
            />

            <Typography variant="subtitle2">Notification Events</Typography>
            <FormGroup>
              {['sync_error', 'api_error', 'new_tickets', 'summarization_complete'].map((event) => (
                <FormControlLabel
                  key={event}
                  control={
                    <Switch
                      checked={config.notification_settings.notification_events.includes(event)}
                      onChange={() => handleArrayToggle('notification_settings.notification_events', event)}
                    />
                  }
                  label={event.replace(/_/g, ' ').replace(/\b\w/g, l => l.toUpperCase())}
                />
              ))}
            </FormGroup>
          </Box>
        </AccordionDetails>
      </Accordion>
    </Box>
  );
};