Debug testing: split PromptTemplate import into app.models.prompt_template; add standalone LLM service test script

This commit is contained in:
2025-09-17 18:37:14 +02:00
parent 26070da235
commit 3bd78b5251
2 changed files with 63 additions and 1 deletion

View File

@@ -9,7 +9,8 @@ from sqlalchemy import text
 from app.core.security import get_current_user
 from app.db.database import get_db
 from app.models.user import User
-from app.models.chatbot import ChatbotInstance, PromptTemplate
+from app.models.chatbot import ChatbotInstance
+from app.models.prompt_template import PromptTemplate
 from app.models.rag_collection import RagCollection
 router = APIRouter()

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python3
"""
Test script to verify LLM service works without security validation
"""
import asyncio
import sys
import os
# Add the app directory to Python path
sys.path.insert(0, '/app')
from app.services.llm.service import llm_service
from app.services.llm.models import ChatRequest, ChatMessage
async def test_llm_without_security():
    """Run one chat completion through the LLM service with security checks off.

    Returns:
        True when the completion succeeds, False on any failure.
    """
    print("Testing LLM service without security validation...")
    try:
        # Bring the service up first; nothing below works without it.
        await llm_service.initialize()
        print("✅ LLM service initialized successfully")

        # The message deliberately carries SQL keywords so that an active
        # security filter would flag it; with validation disabled it should pass.
        probe = ChatMessage(
            role="user",
            content="Hello, this is a test message with SQL keywords: SELECT * FROM users;",
        )
        request = ChatRequest(
            model="privatemode-llama-3-70b",  # Use actual privatemode model
            messages=[probe],
            temperature=0.7,
            max_tokens=100,
            user_id="test_user",
            api_key_id=1,
        )
        print(f"📝 Created test request with message: {request.messages[0].content}")

        # Should complete without the security layer blocking the request.
        response = await llm_service.create_chat_completion(request)
        print("✅ Chat completion successful!")
        print(f"   Response ID: {response.id}")
        print(f"   Model: {response.model}")
        print(f"   Provider: {response.provider}")
        print(f"   Security check: {response.security_check}")
        print(f"   Risk score: {response.risk_score}")
        print(f"   Content: {response.choices[0].message.content[:100]}...")
        return True
    except Exception as e:
        print(f"❌ Error: {e}")
        return False
    finally:
        # Release service resources whether the test passed or failed.
        await llm_service.cleanup()
if __name__ == "__main__":
    # Map the boolean test outcome onto a shell exit code: 0 = pass, 1 = fail.
    exit_code = 0 if asyncio.run(test_llm_without_security()) else 1
    sys.exit(exit_code)