diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts
index 2d30a738..14f6ec09 100644
--- a/packages/opencode/src/provider/provider.ts
+++ b/packages/opencode/src/provider/provider.ts
@@ -159,7 +159,7 @@ export namespace Provider {
     } = {}
     const models = new Map<
       string,
-      { providerID: string; modelID: string; info: ModelsDev.Model; language: LanguageModel }
+      { providerID: string; modelID: string; info: ModelsDev.Model; language: LanguageModel; npm?: string }
     >()
     const sdk = new Map()
@@ -378,12 +378,14 @@ export namespace Provider {
         modelID,
         info,
         language,
+        npm: info.provider?.npm ?? provider.info.npm,
       })
       return {
         modelID,
         providerID,
         info,
         language,
+        npm: info.provider?.npm ?? provider.info.npm,
       }
     } catch (e) {
       if (e instanceof NoSuchModelError)
diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts
index 094d0244..c99b05a4 100644
--- a/packages/opencode/src/provider/transform.ts
+++ b/packages/opencode/src/provider/transform.ts
@@ -96,6 +96,11 @@ export namespace ProviderTransform {
       if (providerID !== "azure") {
         result["textVerbosity"] = "low"
       }
+      if (providerID === "opencode") {
+        result["promptCacheKey"] = sessionID
+        result["include"] = ["reasoning.encrypted_content"]
+        result["reasoningSummary"] = "detailed"
+      }
     }
     return result
   }
diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts
index 6fa794a3..90ffdec7 100644
--- a/packages/opencode/src/session/prompt.ts
+++ b/packages/opencode/src/session/prompt.ts
@@ -268,7 +268,7 @@ export namespace SessionPrompt {
       maxOutputTokens: ProviderTransform.maxOutputTokens(model.providerID, outputLimit, params.options),
       abortSignal: abort.signal,
       providerOptions: {
-        [model.providerID]: params.options,
+        [model.npm === "@ai-sdk/openai" ? "openai" : model.providerID]: params.options,
      },
       stopWhen: stepCountIs(1),
       temperature: params.temperature,
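
For readers skimming the patch, a minimal standalone sketch of the providerOptions key selection that the prompt.ts hunk introduces: options for a model resolved through the @ai-sdk/openai package are passed under the "openai" key instead of the configured provider ID, so a custom provider such as "opencode" that reuses the OpenAI SDK still has its options picked up. The ResolvedModel interface and buildProviderOptions helper below are illustrative names only, not identifiers from this patch.

// Illustrative sketch; ResolvedModel and buildProviderOptions are hypothetical
// names for demonstration, not code from this patch.
interface ResolvedModel {
  providerID: string
  modelID: string
  npm?: string // npm package the provider resolved to, e.g. "@ai-sdk/openai"
}

function buildProviderOptions(model: ResolvedModel, options: Record<string, unknown>) {
  // The AI SDK's OpenAI provider reads providerOptions under its own "openai"
  // key, so a model served through @ai-sdk/openai must expose its options
  // there even when the configured provider ID is something else.
  const key = model.npm === "@ai-sdk/openai" ? "openai" : model.providerID
  return { [key]: options }
}

// Example: an "opencode" provider backed by the @ai-sdk/openai package.
const opts = buildProviderOptions(
  { providerID: "opencode", modelID: "example-model", npm: "@ai-sdk/openai" },
  { promptCacheKey: "session-123", include: ["reasoning.encrypted_content"], reasoningSummary: "detailed" },
)
// opts => { openai: { promptCacheKey: "session-123", include: [...], reasoningSummary: "detailed" } }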