add provider_list

This commit is contained in:
Dax Raad
2025-05-28 12:53:22 -04:00
parent 4132fcc1b2
commit 55a6fcdd3f
14 changed files with 458 additions and 76 deletions

View File

@@ -12,9 +12,11 @@
"cac": "^6.7.14",
"clipanion": "^4.0.0-rc.4",
"diff": "^8.0.2",
"env-paths": "^3.0.0",
"hono": "^4.7.10",
"hono-openapi": "^0.4.8",
"jsdom": "^26.1.0",
"remeda": "^2.22.3",
"ts-lsp-client": "^1.0.3",
"turndown": "^7.2.0",
"vscode-jsonrpc": "^8.2.1",
@@ -170,6 +172,8 @@
"entities": ["entities@6.0.0", "", {}, "sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw=="],
"env-paths": ["env-paths@3.0.0", "", {}, "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A=="],
"environment": ["environment@1.1.0", "", {}, "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q=="],
"es-toolkit": ["es-toolkit@1.38.0", "", {}, "sha512-OT3AxczYYd3W50bCj4V0hKoOAfqIy9tof0leNQYekEDxVKir3RTVTJOLij7VAe6fsCNsGhC0JqIkURpMXTCSEA=="],
@@ -290,6 +294,8 @@
"real-require": ["real-require@0.1.0", "", {}, "sha512-r/H9MzAWtrv8aSVjPCMFpDMl5q66GqtmmRkRjpHTsp4zBAa+snZyiQNlMONiUmEJcsnaw0wCauJ2GWODr/aFkg=="],
"remeda": ["remeda@2.22.3", "", { "dependencies": { "type-fest": "^4.40.1" } }, "sha512-Ka6965m9Zu9OLsysWxVf3jdJKmp6+PKzDv7HWHinEevf0JOJ9y02YpjiC/sKxRpCqGhVyvm1U+0YIj+E6DMgKw=="],
"restore-cursor": ["restore-cursor@4.0.0", "", { "dependencies": { "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg=="],
"rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="],

View File

@@ -28,9 +28,11 @@
"cac": "^6.7.14",
"clipanion": "^4.0.0-rc.4",
"diff": "^8.0.2",
"env-paths": "^3.0.0",
"hono": "^4.7.10",
"hono-openapi": "^0.4.8",
"jsdom": "^26.1.0",
"remeda": "^2.22.3",
"ts-lsp-client": "^1.0.3",
"turndown": "^7.2.0",
"vscode-jsonrpc": "^8.2.1",

View File

@@ -1,25 +1,34 @@
import path from "node:path";
import { Log } from "../util/log";
import { z } from "zod";
import { LLM } from "../llm/llm";
export namespace Config {
const log = Log.create({ service: "config" });
export const Model = z.object({
name: z.string().optional(),
cost: z.object({
input: z.number(),
inputCached: z.number(),
output: z.number(),
outputCached: z.number(),
}),
contextWindow: z.number(),
maxTokens: z.number(),
attachment: z.boolean(),
});
export type Model = z.output<typeof Model>;
export const Provider = z.object({
options: z.record(z.string(), z.any()).optional(),
models: z.record(z.string(), Model).optional(),
});
export type Provider = z.output<typeof Provider>;
export const Info = z
.object({
providers: z
.object({
anthropic: z
.object({
apiKey: z.string().optional(),
headers: z.record(z.string(), z.string()).optional(),
baseURL: z.string().optional(),
})
.strict()
.optional(),
})
.strict()
.optional(),
providers: z.record(z.string(), Provider).optional(),
})
.strict();

25
js/src/bun/index.ts Normal file
View File

@@ -0,0 +1,25 @@
import path from "node:path";
import { Log } from "../util/log";
export namespace BunProc {
  const log = Log.create({ service: "bun" });

  /**
   * Synchronously re-invokes the current executable as `bun` with the given
   * subcommand (e.g. `["add", "--exact", pkg]`), merging the caller's
   * environment on top of `process.env`.
   *
   * @param cmd subcommand and arguments appended after the executable path
   * @param options extra Bun.spawnSync options; `env` entries override process.env
   * @returns the SyncSubprocess result from Bun.spawnSync
   */
  export function run(
    cmd: string[],
    options?: Bun.SpawnOptions.OptionsObject<any, any, any>,
  ) {
    // process.argv0 may be relative; anchor it to the current working directory.
    const executable = path.resolve(process.cwd(), process.argv0);
    const fullCmd = [executable, ...cmd];
    log.info("running", { cmd: fullCmd, options });
    // argv0 is forced to "bun" so the binary behaves as the bun CLI
    // rather than re-running this program.
    return Bun.spawnSync(fullCmd, {
      ...options,
      argv0: "bun",
      env: {
        ...process.env,
        ...options?.env,
      },
    });
  }
}

20
js/src/global/index.ts Normal file
View File

@@ -0,0 +1,20 @@
import envpaths from "env-paths";
import fs from "fs/promises";
// Per-user directory locations for the "opencode" app (no platform suffix).
const appPaths = envpaths("opencode", {
  suffix: "",
});

// Ensure both directories exist before any consumer touches them;
// this runs once at module load via top-level await.
await Promise.all(
  [appPaths.config, appPaths.cache].map((dir) =>
    fs.mkdir(dir, { recursive: true }),
  ),
);

export namespace Global {
  /** Absolute path of the per-user config directory. */
  export function config() {
    return appPaths.config;
  }

  /** Absolute path of the per-user cache directory. */
  export function cache() {
    return appPaths.cache;
  }
}

View File

@@ -7,6 +7,7 @@ import { Session } from "./session/session";
import cac from "cac";
import { Share } from "./share/share";
import { Storage } from "./storage/storage";
import { LLM } from "./llm/llm";
const cli = cac("opencode");
@@ -90,9 +91,19 @@ cli
}
});
const result = await Session.chat(session.id, {
type: "text",
text: message.join(" "),
const providers = await LLM.providers();
const providerID = Object.keys(providers)[0];
const modelID = Object.keys(providers[providerID].info.models!)[0];
const result = await Session.chat({
sessionID: session.id,
providerID,
modelID,
parts: [
{
type: "text",
text: message.join(" "),
},
],
});
for (const part of result.parts) {

View File

@@ -1,9 +1,13 @@
import { App } from "../app";
import { Log } from "../util/log";
import { mergeDeep } from "remeda";
import path from "node:path";
import { createAnthropic } from "@ai-sdk/anthropic";
import type { LanguageModel, Provider } from "ai";
import { NoSuchModelError } from "ai";
import type { Config } from "../app/config";
import { BunProc } from "../bun";
import { Global } from "../global";
export namespace LLM {
const log = Log.create({ service: "llm" });
@@ -14,17 +18,67 @@ export namespace LLM {
}
}
const state = App.state("llm", async (app) => {
const providers: Provider[] = [];
const NATIVE_PROVIDERS: Record<string, Config.Provider> = {
anthropic: {
models: {
"claude-sonnet-4-20250514": {
name: "Claude 4 Sonnet",
cost: {
input: 3.0,
inputCached: 3.75,
output: 15.0,
outputCached: 0.3,
},
contextWindow: 200000,
maxTokens: 50000,
attachment: true,
},
},
},
};
if (process.env["ANTHROPIC_API_KEY"] || app.config.providers?.anthropic) {
log.info("loaded anthropic");
const provider = createAnthropic({
apiKey: app.config.providers?.anthropic?.apiKey,
baseURL: app.config.providers?.anthropic?.baseURL,
headers: app.config.providers?.anthropic?.headers,
});
providers.push(provider);
const AUTODETECT: Record<string, string[]> = {
anthropic: ["ANTHROPIC_API_KEY"],
};
const state = App.state("llm", async (app) => {
const providers: Record<
string,
{
info: Config.Provider;
instance: Provider;
}
> = {};
const list = mergeDeep(NATIVE_PROVIDERS, app.config.providers ?? {});
for (const [providerID, providerInfo] of Object.entries(list)) {
if (
!app.config.providers?.[providerID] &&
!AUTODETECT[providerID]?.some((env) => process.env[env])
)
continue;
const dir = path.join(
Global.cache(),
`node_modules`,
`@ai-sdk`,
providerID,
);
if (!(await Bun.file(path.join(dir, "package.json")).exists())) {
BunProc.run(["add", "--exact", `@ai-sdk/${providerID}@alpha`], {
cwd: Global.cache(),
});
}
const mod = await import(
path.join(Global.cache(), `node_modules`, `@ai-sdk`, providerID)
);
const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!];
const loaded = fn(providerInfo.options);
log.info("loaded", { provider: providerID });
providers[providerID] = {
info: providerInfo,
instance: loaded,
};
}
return {
@@ -37,23 +91,24 @@ export namespace LLM {
return state().then((state) => state.providers);
}
export async function findModel(model: string) {
export async function findModel(providerID: string, modelID: string) {
const key = `${providerID}/${modelID}`;
const s = await state();
if (s.models.has(model)) {
return s.models.get(model)!;
if (s.models.has(key)) return s.models.get(key)!;
const provider = s.providers[providerID];
if (!provider) throw new ModelNotFoundError(modelID);
log.info("loading", {
providerID,
modelID,
});
try {
const match = provider.instance.languageModel(modelID);
log.info("found", { providerID, modelID });
s.models.set(key, match);
return match;
} catch (e) {
if (e instanceof NoSuchModelError) throw new ModelNotFoundError(modelID);
throw e;
}
log.info("loading", { model });
for (const provider of s.providers) {
try {
const match = provider.languageModel(model);
log.info("found", { model });
s.models.set(model, match);
return match;
} catch (e) {
if (e instanceof NoSuchModelError) continue;
throw e;
}
}
throw new ModelNotFoundError(model);
}
}

View File

View File

@@ -0,0 +1 @@
export * as anthropic from "./anthropic";

View File

@@ -0,0 +1,11 @@
/**
 * Static metadata describing a single LLM model: pricing, context limits,
 * and capability flags.
 */
export interface ModelInfo {
  // Token pricing; presumably USD per million tokens — TODO confirm units.
  cost: {
    // Price for uncached input tokens.
    input: number;
    // Price for cache-read input tokens.
    inputCached: number;
    // Price for output tokens.
    output: number;
    // Price for cached output tokens.
    outputCached: number;
  };
  // Maximum context window size, in tokens.
  contextWindow: number;
  // Maximum number of output tokens per response.
  maxTokens: number;
  // Whether the model accepts file/image attachments.
  attachment: boolean;
}

View File

@@ -7,11 +7,18 @@ import { Session } from "../session/session";
import { resolver, validator as zValidator } from "hono-openapi/zod";
import { z } from "zod";
import "zod-openapi/extend";
import { Config } from "../app/config";
import { LLM } from "../llm/llm";
const SessionInfo = Session.Info.openapi({
ref: "Session.Info",
});
const ProviderInfo = Config.Provider.openapi({
ref: "Provider.Info",
});
type ProviderInfo = z.output<typeof ProviderInfo>;
export namespace Server {
const log = Log.create({ service: "server" });
const PORT = 16713;
@@ -156,14 +163,40 @@ export namespace Server {
"json",
z.object({
sessionID: z.string(),
providerID: z.string(),
modelID: z.string(),
parts: z.custom<Session.Message["parts"]>(),
}),
),
async (c) => {
const body = c.req.valid("json");
const msg = await Session.chat(body.sessionID, ...body.parts);
const msg = await Session.chat(body);
return c.json(msg);
},
)
.post(
"/provider_list",
describeRoute({
description: "List all providers",
responses: {
200: {
description: "List of providers",
content: {
"application/json": {
schema: resolver(z.record(z.string(), ProviderInfo)),
},
},
},
},
}),
async (c) => {
const providers = await LLM.providers();
const result: Record<string, ProviderInfo> = {};
for (const [providerID, provider] of Object.entries(providers)) {
result[providerID] = provider.info;
}
return c.json(result);
},
);
return result;

View File

@@ -127,19 +127,19 @@ export namespace Session {
}
}
export async function chat(
sessionID: string,
...parts: UIMessagePart<UIDataTypes>[]
) {
const model = await LLM.findModel("claude-sonnet-4-20250514");
const session = await get(sessionID);
const l = log.clone().tag("session", sessionID);
export async function chat(input: {
sessionID: string;
providerID: string;
modelID: string;
parts: UIMessagePart<UIDataTypes>[];
}) {
const l = log.clone().tag("session", input.sessionID);
l.info("chatting");
const msgs = await messages(sessionID);
const model = await LLM.findModel(input.providerID, input.modelID);
const msgs = await messages(input.sessionID);
async function write(msg: Message) {
return Storage.writeJSON(
"session/message/" + sessionID + "/" + msg.id,
"session/message/" + input.sessionID + "/" + msg.id,
msg,
);
}
@@ -155,7 +155,7 @@ export namespace Session {
},
],
metadata: {
sessionID,
sessionID: input.sessionID,
time: {
created: Date.now(),
},
@@ -171,7 +171,7 @@ export namespace Session {
});
}
msgs.push(system);
state().messages.set(sessionID, msgs);
state().messages.set(input.sessionID, msgs);
generateText({
messages: convertToModelMessages([
{
@@ -185,12 +185,12 @@ export namespace Session {
},
{
role: "user",
parts,
parts: input.parts,
},
]),
model,
}).then((result) => {
return Session.update(sessionID, (draft) => {
return Session.update(input.sessionID, (draft) => {
draft.title = result.text;
});
});
@@ -199,21 +199,33 @@ export namespace Session {
const msg: Message = {
role: "user",
id: Identifier.ascending("message"),
parts,
parts: input.parts,
metadata: {
time: {
created: Date.now(),
},
sessionID,
sessionID: input.sessionID,
tool: {},
},
};
msgs.push(msg);
await write(msg);
const next: Message = {
id: Identifier.ascending("message"),
role: "assistant",
parts: [],
metadata: {
time: {
created: Date.now(),
},
sessionID: input.sessionID,
tool: {},
},
};
const result = streamText({
onStepFinish: (step) => {
update(sessionID, (draft) => {
update(input.sessionID, (draft) => {
draft.tokens.input += step.usage.inputTokens || 0;
draft.tokens.output += step.usage.outputTokens || 0;
draft.tokens.reasoning += step.usage.reasoningTokens || 0;
@@ -225,18 +237,6 @@ export namespace Session {
tools,
model,
});
const next: Message = {
id: Identifier.ascending("message"),
role: "assistant",
parts: [],
metadata: {
time: {
created: Date.now(),
},
sessionID,
tool: {},
},
};
msgs.push(next);
let text: TextUIPart | undefined;