Mirror of https://github.com/aljazceru/opencode.git, synced 2025-12-21 17:54:23 +01:00
feat: Add support for OpenRouter (#92)
* Add support for OpenRouter as a new model provider
  - Introduced `ProviderOpenRouter` in the `models` package.
  - Added OpenRouter-specific models, including `GPT41`, `GPT41Mini`, `GPT4o`, and others, with their configurations and costs.
  - Updated `generateSchema` to include OpenRouter as a provider.
  - Added OpenRouter-specific environment variable handling (`OPENROUTER_API_KEY`) in `config.go`.
  - Implemented default model settings for OpenRouter agents in `setDefaultModelForAgent`.
  - Updated `getProviderAPIKey` to retrieve the OpenRouter API key.
  - Extended `SupportedModels` to include OpenRouter models.
  - Added OpenRouter client initialization in the `provider` package.
  - Modified `processGeneration` to handle `FinishReasonUnknown` in addition to `FinishReasonToolUse`.

* [feature/openrouter-provider] Add new models and provider to schema
  - Added "deepseek-chat-free" and "deepseek-r1-free" to the list of supported models in `opencode-schema.json`.

* [feature/openrouter-provider] Add OpenRouter provider support and integrate new models
  - Updated README.md to include OpenRouter as a supported provider and its configuration details.
  - Added `OPENROUTER_API_KEY` to the environment variable configuration.
  - Introduced OpenRouter-specific models in `internal/llm/models/openrouter.go` with mappings to existing cost and token configurations.
  - Updated `internal/config/config.go` to set default models for OpenRouter agents.
  - Extended `opencode-schema.json` to include OpenRouter models in the schema definitions.
  - Refactored model IDs and names to align with OpenRouter naming conventions.

* [feature/openrouter-provider] Refactor finish reason handling and tool call logic in agent and OpenAI provider
  - Simplified the finish reason check in `agent.go` by removing a redundant variable assignment.
  - Updated `openai.go` to override the finish reason to `FinishReasonToolUse` when tool calls are present.
  - Ensured consistent finish reason handling in both the `send` and `stream` methods of the OpenAI provider.

* [feature/openrouter-provider] Add support for custom headers in the OpenAI client configuration
  - Introduced a new `extraHeaders` field in the `openaiOptions` struct to allow specifying additional HTTP headers.
  - Added logic in `newOpenAIClient` to apply `extraHeaders` to the OpenAI client configuration.
  - Implemented a new option function, `WithOpenAIExtraHeaders`, to set custom headers in `openaiOptions` (the option pattern is sketched below).
  - Updated the OpenRouter provider configuration in `NewProvider` to include default headers (`HTTP-Referer` and `X-Title`) for OpenRouter API requests.

* Update the OpenRouter model config and remove unsupported models

* [feature/openrouter-provider] Update OpenRouter models and default configurations
  - Added new OpenRouter models in `openrouter.go`: `claude-3.5-sonnet`, `claude-3-haiku`, `claude-3.7-sonnet`, `claude-3.5-haiku`, and `claude-3-opus`.
  - Updated default agent models in `config.go`:
    - `agents.coder.model` now uses `claude-3.7-sonnet`.
    - `agents.task.model` now uses `claude-3.7-sonnet`.
    - `agents.title.model` now uses `claude-3.5-haiku`.
  - Updated `opencode-schema.json` to include the new models in the allowed list for schema validation.
  - Adjusted the logic in `setDefaultModelForAgent` to reflect the new default models.

* [feature/openrouter-provider] Remove unused ProviderEvent emission in the stream function
  - Removed the emission of a `ProviderEvent` with type `EventContentStop` in the `stream` function of the `openaiClient` implementation. This event was sent upon successful stream completion but is no longer used.
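The custom-headers support described above uses Go's functional-options pattern. Below is a minimal, self-contained sketch of that pattern as it might be wired up for OpenRouter; the names (`clientOptions`, `WithExtraHeaders`, `newClient`) are illustrative stand-ins for the repo's `openaiOptions`, `WithOpenAIExtraHeaders`, and `newOpenAIClient`, and the base URL and header values are assumptions rather than values taken from this commit.

```go
package main

import "fmt"

// clientOptions loosely mirrors the openaiOptions struct touched by this commit
// (an illustrative stand-in; the real struct lives in the provider package).
type clientOptions struct {
	baseURL      string
	extraHeaders map[string]string
}

// Option is the functional-option type, analogous to OpenAIOption in the diff.
type Option func(*clientOptions)

// WithBaseURL sets the API endpoint, analogous to WithOpenAIBaseURL.
func WithBaseURL(url string) Option {
	return func(o *clientOptions) { o.baseURL = url }
}

// WithExtraHeaders sets additional HTTP headers, analogous to the new
// WithOpenAIExtraHeaders option introduced here.
func WithExtraHeaders(headers map[string]string) Option {
	return func(o *clientOptions) { o.extraHeaders = headers }
}

// newClient applies the options in order, as newOpenAIClient does.
func newClient(opts ...Option) clientOptions {
	var o clientOptions
	for _, apply := range opts {
		apply(&o)
	}
	return o
}

func main() {
	// OpenRouter exposes an OpenAI-compatible API, so the provider can reuse
	// the OpenAI client and only swap the base URL and attach identifying
	// headers. The URL and header values below are assumptions for illustration.
	c := newClient(
		WithBaseURL("https://openrouter.ai/api/v1"),
		WithExtraHeaders(map[string]string{
			"HTTP-Referer": "https://example.com/my-app", // assumption: identifying referer
			"X-Title":      "my-app",                     // assumption: display name
		}),
	)
	fmt.Println(c.baseURL, c.extraHeaders)
}
```

In the actual diff the headers are applied one at a time with `option.WithHeader`, so provider configurations that never set `extraHeaders` are unaffected.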
@@ -21,6 +21,7 @@ type openaiOptions struct {
 	baseURL string
 	disableCache bool
 	reasoningEffort string
+	extraHeaders map[string]string
 }
 
 type OpenAIOption func(*openaiOptions)
@@ -49,6 +50,12 @@ func newOpenAIClient(opts providerClientOptions) OpenAIClient {
 		openaiClientOptions = append(openaiClientOptions, option.WithBaseURL(openaiOpts.baseURL))
 	}
 
+	if openaiOpts.extraHeaders != nil {
+		for key, value := range openaiOpts.extraHeaders {
+			openaiClientOptions = append(openaiClientOptions, option.WithHeader(key, value))
+		}
+	}
+
 	client := openai.NewClient(openaiClientOptions...)
 	return &openaiClient{
 		providerOptions: opts,
@@ -204,11 +211,18 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
 			content = openaiResponse.Choices[0].Message.Content
 		}
 
+		toolCalls := o.toolCalls(*openaiResponse)
+		finishReason := o.finishReason(string(openaiResponse.Choices[0].FinishReason))
+
+		if len(toolCalls) > 0 {
+			finishReason = message.FinishReasonToolUse
+		}
+
 		return &ProviderResponse{
 			Content: content,
-			ToolCalls: o.toolCalls(*openaiResponse),
+			ToolCalls: toolCalls,
 			Usage: o.usage(*openaiResponse),
-			FinishReason: o.finishReason(string(openaiResponse.Choices[0].FinishReason)),
+			FinishReason: finishReason,
 		}, nil
 	}
 }
@@ -267,13 +281,19 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
 		err := openaiStream.Err()
 		if err == nil || errors.Is(err, io.EOF) {
 			// Stream completed successfully
+			finishReason := o.finishReason(string(acc.ChatCompletion.Choices[0].FinishReason))
+
+			if len(toolCalls) > 0 {
+				finishReason = message.FinishReasonToolUse
+			}
+
 			eventChan <- ProviderEvent{
 				Type: EventComplete,
 				Response: &ProviderResponse{
 					Content: currentContent,
 					ToolCalls: toolCalls,
 					Usage: o.usage(acc.ChatCompletion),
-					FinishReason: o.finishReason(string(acc.ChatCompletion.Choices[0].FinishReason)),
+					FinishReason: finishReason,
 				},
 			}
 			close(eventChan)
@@ -375,6 +395,12 @@ func WithOpenAIBaseURL(baseURL string) OpenAIOption {
 	}
 }
 
+func WithOpenAIExtraHeaders(headers map[string]string) OpenAIOption {
+	return func(options *openaiOptions) {
+		options.extraHeaders = headers
+	}
+}
+
 func WithOpenAIDisableCache() OpenAIOption {
 	return func(options *openaiOptions) {
 		options.disableCache = true
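Across both the `send` and `stream` hunks above, the recurring change is a finish-reason override: whenever tool calls are present, the reported finish reason becomes `FinishReasonToolUse`, regardless of what the OpenAI-compatible API (including OpenRouter) returned. A minimal sketch of that normalization, using stand-in types for the repo's `message` package constants:

```go
package main

import "fmt"

// FinishReason and its values are stand-ins for the constants in the repo's
// message package (e.g. message.FinishReasonToolUse, message.FinishReasonUnknown).
type FinishReason string

const (
	FinishReasonEndTurn FinishReason = "end_turn"
	FinishReasonToolUse FinishReason = "tool_use"
	FinishReasonUnknown FinishReason = "unknown"
)

// ToolCall is a minimal stand-in for the provider's tool-call type.
type ToolCall struct {
	ID   string
	Name string
}

// normalizeFinishReason mirrors the logic added in this commit: if the model
// emitted tool calls, report FinishReasonToolUse no matter what finish reason
// the upstream API sent back.
func normalizeFinishReason(apiReason FinishReason, toolCalls []ToolCall) FinishReason {
	if len(toolCalls) > 0 {
		return FinishReasonToolUse
	}
	return apiReason
}

func main() {
	calls := []ToolCall{{ID: "call_1", Name: "bash"}}
	fmt.Println(normalizeFinishReason(FinishReasonUnknown, calls)) // tool_use
	fmt.Println(normalizeFinishReason(FinishReasonEndTurn, nil))   // end_turn
}
```

This pairs with the `processGeneration` change noted in the commit message, which treats `FinishReasonUnknown` alongside `FinishReasonToolUse`, presumably because some OpenRouter-routed models do not report a distinct tool-use finish reason.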