Mirror of https://github.com/aljazceru/ollama-free-model-proxy.git (synced 2025-12-17 21:24:18 +01:00)
Initial commit
provider.go (new file, +132 lines)
@@ -0,0 +1,132 @@
package main

import (
	"context"
	"fmt"
	"strings"
	"time"

	"github.com/sashabaranov/go-openai"
)

type OpenrouterProvider struct {
	client     *openai.Client
	modelNames []string // Shared storage for full model IDs, populated by GetModels
}

func NewOpenrouterProvider(apiKey string) *OpenrouterProvider {
	config := openai.DefaultConfig(apiKey)
	config.BaseURL = "https://openrouter.ai/api/v1/" // Point the OpenAI-compatible client at the OpenRouter endpoint
	return &OpenrouterProvider{
		client:     openai.NewClientWithConfig(config),
		modelNames: []string{},
	}
}

func (o *OpenrouterProvider) ChatStream(messages []openai.ChatCompletionMessage, modelName string) (*openai.ChatCompletionStream, error) {
	// Build a streaming chat completion request
	req := openai.ChatCompletionRequest{
		Model:    modelName,
		Messages: messages,
		Stream:   true,
	}

	// Call the OpenRouter API (OpenAI-compatible) to get a streaming response
	stream, err := o.client.CreateChatCompletionStream(context.Background(), req)
	if err != nil {
		return nil, err
	}

	// Return the stream; the caller is responsible for reading and closing it
	return stream, nil
}

type ModelDetails struct {
	ParentModel       string   `json:"parent_model"`
	Format            string   `json:"format"`
	Family            string   `json:"family"`
	Families          []string `json:"families"`
	ParameterSize     string   `json:"parameter_size"`
	QuantizationLevel string   `json:"quantization_level"`
}

type Model struct {
	Name       string       `json:"name"`
	Model      string       `json:"model,omitempty"`
	ModifiedAt string       `json:"modified_at,omitempty"`
	Size       int64        `json:"size,omitempty"`
	Digest     string       `json:"digest,omitempty"`
	Details    ModelDetails `json:"details,omitempty"`
}

func (o *OpenrouterProvider) GetModels() ([]Model, error) {
	currentTime := time.Now().Format(time.RFC3339)

	// Fetch the available models from the OpenRouter API
	modelsResponse, err := o.client.ListModels(context.Background())
	if err != nil {
		return nil, err
	}

	// Clear shared model storage before repopulating it
	o.modelNames = []string{}

	var models []Model
	for _, apiModel := range modelsResponse.Models {
		// Use the last path segment of the "vendor/model" ID as the display name
		parts := strings.Split(apiModel.ID, "/")
		name := parts[len(parts)-1]

		// Store the full ID in shared storage so aliases can be resolved later
		o.modelNames = append(o.modelNames, apiModel.ID)

		// Build an Ollama-style model entry; size and details are stubbed placeholders
		model := Model{
			Name:       name,
			Model:      name,
			ModifiedAt: currentTime,
			Size:       0, // Stubbed size
			Digest:     name,
			Details: ModelDetails{
				ParentModel:       "",
				Format:            "gguf",
				Family:            "claude",
				Families:          []string{"claude"},
				ParameterSize:     "175B",
				QuantizationLevel: "Q4_K_M",
			},
		}
		models = append(models, model)
	}

	return models, nil
}

func (o *OpenrouterProvider) GetModelDetails(modelName string) (map[string]interface{}, error) {
	// Stub response; replace with actual model details if available
	currentTime := time.Now().Format(time.RFC3339)
	return map[string]interface{}{
		"license":    "STUB License",
		"system":     "STUB SYSTEM",
		"modifiedAt": currentTime,
		"details": map[string]interface{}{
			"format":             "gguf",
			"parameter_size":     "200B",
			"quantization_level": "Q4_K_M",
		},
		"model_info": map[string]interface{}{
			"architecture":    "STUB",
			"context_length":  200000,
			"parameter_count": 200_000_000_000,
		},
	}, nil
}

func (o *OpenrouterProvider) GetFullModelName(alias string) (string, error) {
	for _, fullName := range o.modelNames {
		if strings.HasSuffix(fullName, alias) { // Match by alias suffix
			return fullName, nil
		}
	}
	return "", fmt.Errorf("model alias '%s' not found", alias)
}
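The HTTP handlers that drive this provider are not part of this diff. As a minimal, hypothetical sketch of how the type above might be exercised end to end, the snippet below constructs the provider, lists models, resolves an alias, and consumes a chat stream. The OPENROUTER_API_KEY environment variable, the "gpt-3.5-turbo" alias, and the standalone main function are illustrative assumptions, not something this commit defines.

// Hypothetical usage sketch (not part of this commit).
package main

import (
	"errors"
	"fmt"
	"io"
	"log"
	"os"

	"github.com/sashabaranov/go-openai"
)

func main() {
	// OPENROUTER_API_KEY is an assumed environment variable for this sketch.
	provider := NewOpenrouterProvider(os.Getenv("OPENROUTER_API_KEY"))

	// Populate the shared model-name storage and print what is available.
	models, err := provider.GetModels()
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range models {
		fmt.Println(m.Name)
	}

	// Resolve a short alias back to the full "vendor/model" ID.
	full, err := provider.GetFullModelName("gpt-3.5-turbo") // alias chosen for illustration
	if err != nil {
		log.Fatal(err)
	}

	// Stream a chat completion and print the content deltas as they arrive.
	stream, err := provider.ChatStream([]openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
	}, full)
	if err != nil {
		log.Fatal(err)
	}
	defer stream.Close()

	for {
		resp, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Print(resp.Choices[0].Delta.Content)
	}
	fmt.Println()
}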