zammad fixes

2025-08-21 11:34:01 +02:00
parent 27ee8b4cdb
commit c8e82edc4c
11 changed files with 273 additions and 149 deletions

View File

@@ -115,7 +115,7 @@ export default function ApiKeysPage() {
try {
setLoading(true);
const token = localStorage.getItem("token");
- const response = await fetch("/api/llm/api-keys", {
+ const response = await fetch("/api/v1/api-keys", {
headers: {
"Authorization": `Bearer ${token}`,
"Content-Type": "application/json",
@@ -145,7 +145,7 @@ export default function ApiKeysPage() {
setActionLoading("create");
const token = localStorage.getItem("token");
- const response = await fetch("/api/llm/api-keys", {
+ const response = await fetch("/api/v1/api-keys", {
method: "POST",
headers: {
"Authorization": `Bearer ${token}`,
@@ -199,7 +199,7 @@ export default function ApiKeysPage() {
setActionLoading(`toggle-${keyId}`);
const token = localStorage.getItem("token");
- const response = await fetch(`/api/llm/api-keys/${keyId}`, {
+ const response = await fetch(`/api/v1/api-keys/${keyId}`, {
method: "PUT",
headers: {
"Authorization": `Bearer ${token}`,
@@ -236,7 +236,7 @@ export default function ApiKeysPage() {
setActionLoading(`regenerate-${keyId}`);
const token = localStorage.getItem("token");
- const response = await fetch(`/api/llm/api-keys/${keyId}/regenerate`, {
+ const response = await fetch(`/api/v1/api-keys/${keyId}/regenerate`, {
method: "POST",
headers: {
"Authorization": `Bearer ${token}`,
@@ -280,7 +280,7 @@ export default function ApiKeysPage() {
setActionLoading(`delete-${keyId}`);
const token = localStorage.getItem("token");
- const response = await fetch(`/api/llm/api-keys/${keyId}`, {
+ const response = await fetch(`/api/v1/api-keys/${keyId}`, {
method: "DELETE",
headers: {
"Authorization": `Bearer ${token}`,
@@ -316,7 +316,7 @@ export default function ApiKeysPage() {
setActionLoading(`edit-${keyId}`);
const token = localStorage.getItem("token");
- const response = await fetch(`/api/llm/api-keys/${keyId}`, {
+ const response = await fetch(`/api/v1/api-keys/${keyId}`, {
method: "PUT",
headers: {
"Authorization": `Bearer ${token}`,

View File

@@ -0,0 +1,45 @@
import { NextRequest, NextResponse } from "next/server"

const BACKEND_URL = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL || "http://enclava-backend:8000"

export async function GET(request: NextRequest) {
  try {
    const token = request.headers.get("authorization")
    if (!token) {
      return NextResponse.json({ error: "Unauthorized" }, { status: 401 })
    }

    const response = await fetch(`${BACKEND_URL}/api/v1/llm/models`, {
      method: "GET",
      headers: {
        "Authorization": token,
        "Content-Type": "application/json",
      },
    })

    if (!response.ok) {
      const errorData = await response.text()
      return NextResponse.json(
        { error: "Failed to fetch models", details: errorData },
        { status: response.status }
      )
    }

    const data = await response.json()

    const transformedModels = data.data?.map((model: any) => ({
      id: model.id,
      name: model.id,
      provider: model.owned_by || "unknown"
    })) || []

    return NextResponse.json({ data: transformedModels })
  } catch (error) {
    console.error("Error fetching models:", error)
    return NextResponse.json(
      { error: "Internal server error" },
      { status: 500 }
    )
  }
}
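
For orientation, a minimal client-side usage sketch (not part of the commit), assuming this handler is mounted at /api/v1/llm/models — the path the updated components further down fetch — and that the bearer token is kept in localStorage as in the existing pages:

interface ModelOption {
  id: string
  name: string
  provider: string
}

// Hypothetical helper; it consumes the { data: [...] } shape returned by the route above.
async function loadModels(): Promise<ModelOption[]> {
  const token = localStorage.getItem("token")
  const response = await fetch("/api/v1/llm/models", {
    headers: {
      "Authorization": `Bearer ${token}`,
      "Content-Type": "application/json",
    },
  })
  if (!response.ok) {
    throw new Error(`Failed to fetch models: ${response.status}`)
  }
  const { data } = await response.json()
  return data as ModelOption[]
}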

View File

@@ -0,0 +1,38 @@
import { NextRequest, NextResponse } from "next/server"

const BACKEND_URL = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL || "http://enclava-backend:8000"

export async function GET(request: NextRequest) {
  try {
    const token = request.headers.get("authorization")
    if (!token) {
      return NextResponse.json({ error: "Unauthorized" }, { status: 401 })
    }

    const response = await fetch(`${BACKEND_URL}/api/v1/llm/providers/status`, {
      method: "GET",
      headers: {
        "Authorization": token,
        "Content-Type": "application/json",
      },
    })

    if (!response.ok) {
      const errorData = await response.text()
      return NextResponse.json(
        { error: "Failed to fetch provider status", details: errorData },
        { status: response.status }
      )
    }

    const data = await response.json()
    return NextResponse.json(data)
  } catch (error) {
    console.error("Error fetching provider status:", error)
    return NextResponse.json(
      { error: "Internal server error" },
      { status: 500 }
    )
  }
}
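
Both new route handlers forward the caller's Authorization header to the backend in the same way; purely as a sketch (not part of the commit, helper name hypothetical), the shared logic could be factored like this:

import { NextRequest, NextResponse } from "next/server"

const BACKEND_URL = process.env.INTERNAL_API_URL || process.env.NEXT_PUBLIC_API_URL || "http://enclava-backend:8000"

// Hypothetical shared GET proxy: forwards the caller's Authorization header to a backend path.
export async function proxyGet(request: NextRequest, backendPath: string): Promise<NextResponse> {
  const token = request.headers.get("authorization")
  if (!token) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 })
  }
  const response = await fetch(`${BACKEND_URL}${backendPath}`, {
    method: "GET",
    headers: { "Authorization": token, "Content-Type": "application/json" },
  })
  if (!response.ok) {
    return NextResponse.json(
      { error: "Upstream request failed", details: await response.text() },
      { status: response.status }
    )
  }
  return NextResponse.json(await response.json())
}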

View File

@@ -118,9 +118,9 @@ function LLMPageContent() {
// Fetch API keys, budgets, and models
const [keysRes, budgetsRes, modelsRes] = await Promise.all([
- fetch('/api/llm/api-keys', { headers }),
- fetch('/api/llm/budgets', { headers }),
- fetch('/api/llm/models', { headers })
+ fetch('/api/v1/api-keys', { headers }),
+ fetch('/api/v1/llm/budget/status', { headers }),
+ fetch('/api/v1/llm/models', { headers })
])
console.log('API keys response status:', keysRes.status)
@@ -163,7 +163,7 @@ function LLMPageContent() {
const createAPIKey = async () => {
try {
const token = localStorage.getItem('token')
- const response = await fetch('/api/llm/api-keys', {
+ const response = await fetch('/api/v1/api-keys', {
method: 'POST',
headers: {
'Authorization': `Bearer ${token}`,
@@ -209,7 +209,7 @@ function LLMPageContent() {
throw new Error('No authentication token found')
}
- const response = await fetch(`/api/llm/api-keys/${keyId}`, {
+ const response = await fetch(`/api/v1/api-keys/${keyId}`, {
method: 'DELETE',
headers: {
'Authorization': `Bearer ${token}`,

View File

@@ -69,7 +69,7 @@ export default function ChatPlayground({ selectedModel, onRequestComplete }: Cha
{ role: 'user', content: userMessage.content }
]
- const response = await fetch('/api/llm/chat/completions', {
+ const response = await fetch('/api/v1/llm/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json'

View File

@@ -61,8 +61,8 @@ export default function ModelSelector({ value, onValueChange, filter = 'all', cl
// Fetch models and provider status in parallel
const [modelsResponse, statusResponse] = await Promise.allSettled([
- fetch('/api/llm/models', { headers }),
- fetch('/api/llm/providers/status', { headers })
+ fetch('/api/v1/llm/models', { headers }),
+ fetch('/api/v1/llm/providers/status', { headers })
])
// Handle models response
@@ -234,7 +234,7 @@ export default function ModelSelector({ value, onValueChange, filter = 'all', cl
</div>
</div>
- <Select value={value ?? ''} onValueChange={onValueChange}>
+ <Select value={value || ''} onValueChange={onValueChange}>
<SelectTrigger>
<SelectValue placeholder="Select a model">
{selectedModel && (

View File

@@ -55,8 +55,8 @@ export default function ProviderHealthDashboard() {
}
const [statusResponse, metricsResponse] = await Promise.allSettled([
- fetch('/api/llm/providers/status', { headers }),
- fetch('/api/llm/metrics', { headers })
+ fetch('/api/v1/llm/providers/status', { headers }),
+ fetch('/api/v1/llm/metrics', { headers })
])
// Handle provider status

View File

@@ -39,7 +39,7 @@ export function useBudgetStatus(autoRefresh = true, refreshInterval = 30000) {
try {
setLoading(true)
- const response = await fetch('/api/llm/budget/status')
+ const response = await fetch('/api/v1/llm/budget/status')
if (!response.ok) {
if (response.status === 401) {