- import z from "zod"
- import fuzzysort from "fuzzysort"
- import { Config } from "../config/config"
- import { mapValues, mergeDeep, omit, pickBy, sortBy } from "remeda"
- import { NoSuchModelError, type Provider as SDK } from "ai"
- import { Log } from "../util/log"
- import { BunProc } from "../bun"
- import { Plugin } from "../plugin"
- import { ModelsDev } from "./models"
- import { NamedError } from "@opencode-ai/util/error"
- import { Auth } from "../auth"
- import { Env } from "../env"
- import { Instance } from "../project/instance"
- import { Flag } from "../flag/flag"
- import { iife } from "@/util/iife"
- // Direct imports for bundled providers
- import { createAmazonBedrock, type AmazonBedrockProviderSettings } from "@ai-sdk/amazon-bedrock"
- import { createAnthropic } from "@ai-sdk/anthropic"
- import { createAzure } from "@ai-sdk/azure"
- import { createGoogleGenerativeAI } from "@ai-sdk/google"
- import { createVertex } from "@ai-sdk/google-vertex"
- import { createVertexAnthropic } from "@ai-sdk/google-vertex/anthropic"
- import { createOpenAI } from "@ai-sdk/openai"
- import { createOpenAICompatible } from "@ai-sdk/openai-compatible"
- import { createOpenRouter, type LanguageModelV2 } from "@openrouter/ai-sdk-provider"
- import { createOpenaiCompatible as createGitHubCopilotOpenAICompatible } from "./sdk/openai-compatible/src"
- import { createXai } from "@ai-sdk/xai"
- import { createMistral } from "@ai-sdk/mistral"
- import { createGroq } from "@ai-sdk/groq"
- import { createDeepInfra } from "@ai-sdk/deepinfra"
- import { createCerebras } from "@ai-sdk/cerebras"
- import { createCohere } from "@ai-sdk/cohere"
- import { createGateway } from "@ai-sdk/gateway"
- import { createTogetherAI } from "@ai-sdk/togetherai"
- import { createPerplexity } from "@ai-sdk/perplexity"
- import { createVercel } from "@ai-sdk/vercel"
- import { createGitLab } from "@gitlab/gitlab-ai-provider"
- import { ProviderTransform } from "./transform"
- export namespace Provider {
- const log = Log.create({ service: "provider" })
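- // Matches IDs of the form "gpt-<major>..." and compares the major version,
- // e.g. "gpt-5-codex" → true, "gpt-4.1" → false; non-matching IDs return false.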
- function isGpt5OrLater(modelID: string): boolean {
- const match = /^gpt-(\d+)/.exec(modelID)
- if (!match) {
- return false
- }
- return Number(match[1]) >= 5
- }
- function shouldUseCopilotResponsesApi(modelID: string): boolean {
- return isGpt5OrLater(modelID) && !modelID.startsWith("gpt-5-mini")
- }
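- // SDK factories bundled with opencode, keyed by npm package name.
- // getSDK() checks this map first so these packages are constructed directly
- // instead of being installed at runtime via BunProc.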
- const BUNDLED_PROVIDERS: Record<string, (options: any) => SDK> = {
- "@ai-sdk/amazon-bedrock": createAmazonBedrock,
- "@ai-sdk/anthropic": createAnthropic,
- "@ai-sdk/azure": createAzure,
- "@ai-sdk/google": createGoogleGenerativeAI,
- "@ai-sdk/google-vertex": createVertex,
- "@ai-sdk/google-vertex/anthropic": createVertexAnthropic,
- "@ai-sdk/openai": createOpenAI,
- "@ai-sdk/openai-compatible": createOpenAICompatible,
- "@openrouter/ai-sdk-provider": createOpenRouter,
- "@ai-sdk/xai": createXai,
- "@ai-sdk/mistral": createMistral,
- "@ai-sdk/groq": createGroq,
- "@ai-sdk/deepinfra": createDeepInfra,
- "@ai-sdk/cerebras": createCerebras,
- "@ai-sdk/cohere": createCohere,
- "@ai-sdk/gateway": createGateway,
- "@ai-sdk/togetherai": createTogetherAI,
- "@ai-sdk/perplexity": createPerplexity,
- "@ai-sdk/vercel": createVercel,
- "@gitlab/gitlab-ai-provider": createGitLab,
- // @ts-ignore (TODO: kill this code so we don't have to maintain it)
- "@ai-sdk/github-copilot": createGitHubCopilotOpenAICompatible,
- }
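- // A custom loader inspects env vars, stored auth, and config for a provider and
- // reports whether it should autoload, any extra SDK options, and an optional
- // per-model resolver (e.g. choosing sdk.responses() vs sdk.chat()).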
- type CustomModelLoader = (sdk: any, modelID: string, options?: Record<string, any>) => Promise<any>
- type CustomLoader = (provider: Info) => Promise<{
- autoload: boolean
- getModel?: CustomModelLoader
- options?: Record<string, any>
- }>
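- // Per-provider overrides keyed by provider ID; consulted in state() after the
- // env, API-key, and plugin auth sources have been merged.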
- const CUSTOM_LOADERS: Record<string, CustomLoader> = {
- async anthropic() {
- return {
- autoload: false,
- options: {
- headers: {
- "anthropic-beta":
- "claude-code-20250219,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14",
- },
- },
- }
- },
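- // opencode: without an API key (env, stored auth, or config) paid models are
- // stripped so only free models autoload, using apiKey "public".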
- async opencode(input) {
- const hasKey = await (async () => {
- const env = Env.all()
- if (input.env.some((item) => env[item])) return true
- if (await Auth.get(input.id)) return true
- const config = await Config.get()
- if (config.provider?.["opencode"]?.options?.apiKey) return true
- return false
- })()
- if (!hasKey) {
- for (const [key, value] of Object.entries(input.models)) {
- if (value.cost.input === 0) continue
- delete input.models[key]
- }
- }
- return {
- autoload: Object.keys(input.models).length > 0,
- options: hasKey ? {} : { apiKey: "public" },
- }
- },
- openai: async () => {
- return {
- autoload: false,
- async getModel(sdk: any, modelID: string, _options?: Record<string, any>) {
- return sdk.responses(modelID)
- },
- options: {},
- }
- },
- "github-copilot": async () => {
- return {
- autoload: false,
- async getModel(sdk: any, modelID: string, _options?: Record<string, any>) {
- return shouldUseCopilotResponsesApi(modelID) ? sdk.responses(modelID) : sdk.chat(modelID)
- },
- options: {},
- }
- },
- "github-copilot-enterprise": async () => {
- return {
- autoload: false,
- async getModel(sdk: any, modelID: string, _options?: Record<string, any>) {
- return shouldUseCopilotResponsesApi(modelID) ? sdk.responses(modelID) : sdk.chat(modelID)
- },
- options: {},
- }
- },
- azure: async () => {
- return {
- autoload: false,
- async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
- if (options?.["useCompletionUrls"]) {
- return sdk.chat(modelID)
- } else {
- return sdk.responses(modelID)
- }
- },
- options: {},
- }
- },
- "azure-cognitive-services": async () => {
- const resourceName = Env.get("AZURE_COGNITIVE_SERVICES_RESOURCE_NAME")
- return {
- autoload: false,
- async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
- if (options?.["useCompletionUrls"]) {
- return sdk.chat(modelID)
- } else {
- return sdk.responses(modelID)
- }
- },
- options: {
- baseURL: resourceName ? `https://${resourceName}.cognitiveservices.azure.com/openai` : undefined,
- },
- }
- },
- "amazon-bedrock": async () => {
- const config = await Config.get()
- const providerConfig = config.provider?.["amazon-bedrock"]
- const auth = await Auth.get("amazon-bedrock")
- // Region precedence: 1) config file, 2) env var, 3) default
- const configRegion = providerConfig?.options?.region
- const envRegion = Env.get("AWS_REGION")
- const defaultRegion = configRegion ?? envRegion ?? "us-east-1"
- // Profile: config file takes precedence over env var
- const configProfile = providerConfig?.options?.profile
- const envProfile = Env.get("AWS_PROFILE")
- const profile = configProfile ?? envProfile
- const awsAccessKeyId = Env.get("AWS_ACCESS_KEY_ID")
- const awsBearerToken = iife(() => {
- const envToken = Env.get("AWS_BEARER_TOKEN_BEDROCK")
- if (envToken) return envToken
- if (auth?.type === "api") {
- Env.set("AWS_BEARER_TOKEN_BEDROCK", auth.key)
- return auth.key
- }
- return undefined
- })
- const awsWebIdentityTokenFile = Env.get("AWS_WEB_IDENTITY_TOKEN_FILE")
- if (!profile && !awsAccessKeyId && !awsBearerToken && !awsWebIdentityTokenFile) return { autoload: false }
- const providerOptions: AmazonBedrockProviderSettings = {
- region: defaultRegion,
- }
- // Only use credential chain if no bearer token exists
- // Bearer token takes precedence over credential chain (profiles, access keys, IAM roles, web identity tokens)
- if (!awsBearerToken) {
- const { fromNodeProviderChain } = await import(await BunProc.install("@aws-sdk/credential-providers"))
- // Build credential provider options (only pass profile if specified)
- const credentialProviderOptions = profile ? { profile } : {}
- providerOptions.credentialProvider = fromNodeProviderChain(credentialProviderOptions)
- }
- // Add custom endpoint if specified (endpoint takes precedence over baseURL)
- const endpoint = providerConfig?.options?.endpoint ?? providerConfig?.options?.baseURL
- if (endpoint) {
- providerOptions.baseURL = endpoint
- }
- return {
- autoload: true,
- options: providerOptions,
- async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
- // Skip region prefixing if model already has a cross-region inference profile prefix
- if (modelID.startsWith("global.") || modelID.startsWith("jp.")) {
- return sdk.languageModel(modelID)
- }
- // Region resolution precedence (highest to lowest):
- // 1. options.region from the provider config passed at model load time
- // 2. defaultRegion resolved above (config region, then AWS_REGION, then "us-east-1")
- const region = options?.region ?? defaultRegion
- let regionPrefix = region.split("-")[0]
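- // Cross-region inference profiles prepend a geo prefix to the model ID,
- // e.g. "anthropic.claude-..." in us-east-1 becomes "us.anthropic.claude-...".
- // The switch below applies the prefix only for model/region combinations that need it.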
- switch (regionPrefix) {
- case "us": {
- const modelRequiresPrefix = [
- "nova-micro",
- "nova-lite",
- "nova-pro",
- "nova-premier",
- "nova-2",
- "claude",
- "deepseek",
- ].some((m) => modelID.includes(m))
- const isGovCloud = region.startsWith("us-gov")
- if (modelRequiresPrefix && !isGovCloud) {
- modelID = `${regionPrefix}.${modelID}`
- }
- break
- }
- case "eu": {
- const regionRequiresPrefix = [
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "eu-north-1",
- "eu-central-1",
- "eu-south-1",
- "eu-south-2",
- ].some((r) => region.includes(r))
- const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "llama3", "pixtral"].some((m) =>
- modelID.includes(m),
- )
- if (regionRequiresPrefix && modelRequiresPrefix) {
- modelID = `${regionPrefix}.${modelID}`
- }
- break
- }
- case "ap": {
- const isAustraliaRegion = ["ap-southeast-2", "ap-southeast-4"].includes(region)
- const isTokyoRegion = region === "ap-northeast-1"
- if (
- isAustraliaRegion &&
- ["anthropic.claude-sonnet-4-5", "anthropic.claude-haiku"].some((m) => modelID.includes(m))
- ) {
- regionPrefix = "au"
- modelID = `${regionPrefix}.${modelID}`
- } else if (isTokyoRegion) {
- // Tokyo region uses jp. prefix for cross-region inference
- const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
- modelID.includes(m),
- )
- if (modelRequiresPrefix) {
- regionPrefix = "jp"
- modelID = `${regionPrefix}.${modelID}`
- }
- } else {
- // Other APAC regions use apac. prefix
- const modelRequiresPrefix = ["claude", "nova-lite", "nova-micro", "nova-pro"].some((m) =>
- modelID.includes(m),
- )
- if (modelRequiresPrefix) {
- regionPrefix = "apac"
- modelID = `${regionPrefix}.${modelID}`
- }
- }
- break
- }
- }
- return sdk.languageModel(modelID)
- },
- }
- },
- openrouter: async () => {
- return {
- autoload: false,
- options: {
- headers: {
- "HTTP-Referer": "https://opencode.ai/",
- "X-Title": "opencode",
- },
- },
- }
- },
- vercel: async () => {
- return {
- autoload: false,
- options: {
- headers: {
- "http-referer": "https://opencode.ai/",
- "x-title": "opencode",
- },
- },
- }
- },
- "google-vertex": async () => {
- const project = Env.get("GOOGLE_CLOUD_PROJECT") ?? Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT")
- const location = Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "us-east5"
- const autoload = Boolean(project)
- if (!autoload) return { autoload: false }
- return {
- autoload: true,
- options: {
- project,
- location,
- },
- async getModel(sdk: any, modelID: string) {
- const id = String(modelID).trim()
- return sdk.languageModel(id)
- },
- }
- },
- "google-vertex-anthropic": async () => {
- const project = Env.get("GOOGLE_CLOUD_PROJECT") ?? Env.get("GCP_PROJECT") ?? Env.get("GCLOUD_PROJECT")
- const location = Env.get("GOOGLE_CLOUD_LOCATION") ?? Env.get("VERTEX_LOCATION") ?? "global"
- const autoload = Boolean(project)
- if (!autoload) return { autoload: false }
- return {
- autoload: true,
- options: {
- project,
- location,
- },
- async getModel(sdk: any, modelID: string) {
- const id = String(modelID).trim()
- return sdk.languageModel(id)
- },
- }
- },
- "sap-ai-core": async () => {
- const auth = await Auth.get("sap-ai-core")
- const envServiceKey = iife(() => {
- const envAICoreServiceKey = Env.get("AICORE_SERVICE_KEY")
- if (envAICoreServiceKey) return envAICoreServiceKey
- if (auth?.type === "api") {
- Env.set("AICORE_SERVICE_KEY", auth.key)
- return auth.key
- }
- return undefined
- })
- const deploymentId = Env.get("AICORE_DEPLOYMENT_ID")
- const resourceGroup = Env.get("AICORE_RESOURCE_GROUP")
- return {
- autoload: !!envServiceKey,
- options: envServiceKey ? { deploymentId, resourceGroup } : {},
- async getModel(sdk: any, modelID: string) {
- return sdk(modelID)
- },
- }
- },
- zenmux: async () => {
- return {
- autoload: false,
- options: {
- headers: {
- "HTTP-Referer": "https://opencode.ai/",
- "X-Title": "opencode",
- },
- },
- }
- },
- gitlab: async (input) => {
- const instanceUrl = Env.get("GITLAB_INSTANCE_URL") || "https://gitlab.com"
- const auth = await Auth.get(input.id)
- const apiKey = await (async () => {
- if (auth?.type === "oauth") return auth.access
- if (auth?.type === "api") return auth.key
- return Env.get("GITLAB_TOKEN")
- })()
- const config = await Config.get()
- const providerConfig = config.provider?.["gitlab"]
- return {
- autoload: !!apiKey,
- options: {
- instanceUrl,
- apiKey,
- featureFlags: {
- duo_agent_platform_agentic_chat: true,
- duo_agent_platform: true,
- ...(providerConfig?.options?.featureFlags || {}),
- },
- },
- async getModel(sdk: ReturnType<typeof createGitLab>, modelID: string) {
- return sdk.agenticChat(modelID, {
- featureFlags: {
- duo_agent_platform_agentic_chat: true,
- duo_agent_platform: true,
- ...(providerConfig?.options?.featureFlags || {}),
- },
- })
- },
- }
- },
- "cloudflare-ai-gateway": async (input) => {
- const accountId = Env.get("CLOUDFLARE_ACCOUNT_ID")
- const gateway = Env.get("CLOUDFLARE_GATEWAY_ID")
- if (!accountId || !gateway) return { autoload: false }
- // Get API token from env or stored auth
- const apiToken = await (async () => {
- const envToken = Env.get("CLOUDFLARE_API_TOKEN")
- if (envToken) return envToken
- const auth = await Auth.get(input.id)
- if (auth?.type === "api") return auth.key
- return undefined
- })()
- return {
- autoload: true,
- async getModel(sdk: any, modelID: string, _options?: Record<string, any>) {
- return sdk.languageModel(modelID)
- },
- options: {
- baseURL: `https://gateway.ai.cloudflare.com/v1/${accountId}/${gateway}/compat`,
- headers: {
- // Cloudflare AI Gateway uses cf-aig-authorization for authenticated gateways
- // This enables Unified Billing where Cloudflare handles upstream provider auth
- ...(apiToken ? { "cf-aig-authorization": `Bearer ${apiToken}` } : {}),
- "HTTP-Referer": "https://opencode.ai/",
- "X-Title": "opencode",
- },
- // Custom fetch to handle parameter transformation and auth
- fetch: async (input: RequestInfo | URL, init?: RequestInit) => {
- const headers = new Headers(init?.headers)
- // Strip Authorization header - AI Gateway uses cf-aig-authorization instead
- headers.delete("Authorization")
- // Transform max_tokens to max_completion_tokens for newer models
- if (init?.body && init.method === "POST") {
- try {
- const body = JSON.parse(init.body as string)
- if (body.max_tokens !== undefined && !body.max_completion_tokens) {
- body.max_completion_tokens = body.max_tokens
- delete body.max_tokens
- init = { ...init, body: JSON.stringify(body) }
- }
- } catch {
- // If body parsing fails, continue with original request
- }
- }
- return fetch(input, { ...init, headers })
- },
- },
- }
- },
- cerebras: async () => {
- return {
- autoload: false,
- options: {
- headers: {
- "X-Cerebras-3rd-Party-Integration": "opencode",
- },
- },
- }
- },
- }
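- // Normalized model record used throughout opencode: API wiring (package, URL),
- // capabilities, per-token costs, context limits, and optional variants.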
- export const Model = z
- .object({
- id: z.string(),
- providerID: z.string(),
- api: z.object({
- id: z.string(),
- url: z.string(),
- npm: z.string(),
- }),
- name: z.string(),
- family: z.string().optional(),
- capabilities: z.object({
- temperature: z.boolean(),
- reasoning: z.boolean(),
- attachment: z.boolean(),
- toolcall: z.boolean(),
- input: z.object({
- text: z.boolean(),
- audio: z.boolean(),
- image: z.boolean(),
- video: z.boolean(),
- pdf: z.boolean(),
- }),
- output: z.object({
- text: z.boolean(),
- audio: z.boolean(),
- image: z.boolean(),
- video: z.boolean(),
- pdf: z.boolean(),
- }),
- interleaved: z.union([
- z.boolean(),
- z.object({
- field: z.enum(["reasoning_content", "reasoning_details"]),
- }),
- ]),
- }),
- cost: z.object({
- input: z.number(),
- output: z.number(),
- cache: z.object({
- read: z.number(),
- write: z.number(),
- }),
- experimentalOver200K: z
- .object({
- input: z.number(),
- output: z.number(),
- cache: z.object({
- read: z.number(),
- write: z.number(),
- }),
- })
- .optional(),
- }),
- limit: z.object({
- context: z.number(),
- input: z.number().optional(),
- output: z.number(),
- }),
- status: z.enum(["alpha", "beta", "deprecated", "active"]),
- options: z.record(z.string(), z.any()),
- headers: z.record(z.string(), z.string()),
- release_date: z.string(),
- variants: z.record(z.string(), z.record(z.string(), z.any())).optional(),
- })
- .meta({
- ref: "Model",
- })
- export type Model = z.infer<typeof Model>
- export const Info = z
- .object({
- id: z.string(),
- name: z.string(),
- source: z.enum(["env", "config", "custom", "api"]),
- env: z.string().array(),
- key: z.string().optional(),
- options: z.record(z.string(), z.any()),
- models: z.record(z.string(), Model),
- })
- .meta({
- ref: "Provider",
- })
- export type Info = z.infer<typeof Info>
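- // Converts a models.dev model entry into the internal Model shape, defaulting
- // missing costs/modalities and routing github-copilot providers to the bundled
- // copilot SDK package.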
- function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model): Model {
- const m: Model = {
- id: model.id,
- providerID: provider.id,
- name: model.name,
- family: model.family,
- api: {
- id: model.id,
- url: provider.api!,
- npm: iife(() => {
- if (provider.id.startsWith("github-copilot")) return "@ai-sdk/github-copilot"
- return model.provider?.npm ?? provider.npm ?? "@ai-sdk/openai-compatible"
- }),
- },
- status: model.status ?? "active",
- headers: model.headers ?? {},
- options: model.options ?? {},
- cost: {
- input: model.cost?.input ?? 0,
- output: model.cost?.output ?? 0,
- cache: {
- read: model.cost?.cache_read ?? 0,
- write: model.cost?.cache_write ?? 0,
- },
- experimentalOver200K: model.cost?.context_over_200k
- ? {
- cache: {
- read: model.cost.context_over_200k.cache_read ?? 0,
- write: model.cost.context_over_200k.cache_write ?? 0,
- },
- input: model.cost.context_over_200k.input,
- output: model.cost.context_over_200k.output,
- }
- : undefined,
- },
- limit: {
- context: model.limit.context,
- input: model.limit.input,
- output: model.limit.output,
- },
- capabilities: {
- temperature: model.temperature,
- reasoning: model.reasoning,
- attachment: model.attachment,
- toolcall: model.tool_call,
- input: {
- text: model.modalities?.input?.includes("text") ?? false,
- audio: model.modalities?.input?.includes("audio") ?? false,
- image: model.modalities?.input?.includes("image") ?? false,
- video: model.modalities?.input?.includes("video") ?? false,
- pdf: model.modalities?.input?.includes("pdf") ?? false,
- },
- output: {
- text: model.modalities?.output?.includes("text") ?? false,
- audio: model.modalities?.output?.includes("audio") ?? false,
- image: model.modalities?.output?.includes("image") ?? false,
- video: model.modalities?.output?.includes("video") ?? false,
- pdf: model.modalities?.output?.includes("pdf") ?? false,
- },
- interleaved: model.interleaved ?? false,
- },
- release_date: model.release_date,
- variants: {},
- }
- m.variants = mapValues(ProviderTransform.variants(m), (v) => v)
- return m
- }
- export function fromModelsDevProvider(provider: ModelsDev.Provider): Info {
- return {
- id: provider.id,
- source: "custom",
- name: provider.name,
- env: provider.env ?? [],
- options: {},
- models: mapValues(provider.models, (model) => fromModelsDevModel(provider, model)),
- }
- }
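- // Builds the provider registry once per instance: load the models.dev catalog,
- // apply opencode.json overrides, then enable providers discovered via env vars,
- // stored API keys, plugin auth, custom loaders, and explicit config, and finally
- // filter out disabled providers and alpha/deprecated/blacklisted models.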
- const state = Instance.state(async () => {
- using _ = log.time("state")
- const config = await Config.get()
- const modelsDev = await ModelsDev.get()
- const database = mapValues(modelsDev, fromModelsDevProvider)
- const disabled = new Set(config.disabled_providers ?? [])
- const enabled = config.enabled_providers ? new Set(config.enabled_providers) : null
- function isProviderAllowed(providerID: string): boolean {
- if (enabled && !enabled.has(providerID)) return false
- if (disabled.has(providerID)) return false
- return true
- }
- const providers: { [providerID: string]: Info } = {}
- const languages = new Map<string, LanguageModelV2>()
- const modelLoaders: {
- [providerID: string]: CustomModelLoader
- } = {}
- const sdk = new Map<number, SDK>()
- log.info("init")
- const configProviders = Object.entries(config.provider ?? {})
- // Add GitHub Copilot Enterprise provider that inherits from GitHub Copilot
- if (database["github-copilot"]) {
- const githubCopilot = database["github-copilot"]
- database["github-copilot-enterprise"] = {
- ...githubCopilot,
- id: "github-copilot-enterprise",
- name: "GitHub Copilot Enterprise",
- models: mapValues(githubCopilot.models, (model) => ({
- ...model,
- providerID: "github-copilot-enterprise",
- })),
- }
- }
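- // Deep-merges a partial onto an already-enabled provider, or seeds a new entry
- // from the database; provider IDs unknown to the database are ignored.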
- function mergeProvider(providerID: string, provider: Partial<Info>) {
- const existing = providers[providerID]
- if (existing) {
- // @ts-expect-error
- providers[providerID] = mergeDeep(existing, provider)
- return
- }
- const match = database[providerID]
- if (!match) return
- // @ts-expect-error
- providers[providerID] = mergeDeep(match, provider)
- }
- // extend database from config
- for (const [providerID, provider] of configProviders) {
- const existing = database[providerID]
- const parsed: Info = {
- id: providerID,
- name: provider.name ?? existing?.name ?? providerID,
- env: provider.env ?? existing?.env ?? [],
- options: mergeDeep(existing?.options ?? {}, provider.options ?? {}),
- source: "config",
- models: existing?.models ?? {},
- }
- for (const [modelID, model] of Object.entries(provider.models ?? {})) {
- const existingModel = parsed.models[model.id ?? modelID]
- const name = iife(() => {
- if (model.name) return model.name
- if (model.id && model.id !== modelID) return modelID
- return existingModel?.name ?? modelID
- })
- const parsedModel: Model = {
- id: modelID,
- api: {
- id: model.id ?? existingModel?.api.id ?? modelID,
- npm:
- model.provider?.npm ??
- provider.npm ??
- existingModel?.api.npm ??
- modelsDev[providerID]?.npm ??
- "@ai-sdk/openai-compatible",
- url: provider?.api ?? existingModel?.api.url ?? modelsDev[providerID]?.api,
- },
- status: model.status ?? existingModel?.status ?? "active",
- name,
- providerID,
- capabilities: {
- temperature: model.temperature ?? existingModel?.capabilities.temperature ?? false,
- reasoning: model.reasoning ?? existingModel?.capabilities.reasoning ?? false,
- attachment: model.attachment ?? existingModel?.capabilities.attachment ?? false,
- toolcall: model.tool_call ?? existingModel?.capabilities.toolcall ?? true,
- input: {
- text: model.modalities?.input?.includes("text") ?? existingModel?.capabilities.input.text ?? true,
- audio: model.modalities?.input?.includes("audio") ?? existingModel?.capabilities.input.audio ?? false,
- image: model.modalities?.input?.includes("image") ?? existingModel?.capabilities.input.image ?? false,
- video: model.modalities?.input?.includes("video") ?? existingModel?.capabilities.input.video ?? false,
- pdf: model.modalities?.input?.includes("pdf") ?? existingModel?.capabilities.input.pdf ?? false,
- },
- output: {
- text: model.modalities?.output?.includes("text") ?? existingModel?.capabilities.output.text ?? true,
- audio: model.modalities?.output?.includes("audio") ?? existingModel?.capabilities.output.audio ?? false,
- image: model.modalities?.output?.includes("image") ?? existingModel?.capabilities.output.image ?? false,
- video: model.modalities?.output?.includes("video") ?? existingModel?.capabilities.output.video ?? false,
- pdf: model.modalities?.output?.includes("pdf") ?? existingModel?.capabilities.output.pdf ?? false,
- },
- interleaved: model.interleaved ?? false,
- },
- cost: {
- input: model?.cost?.input ?? existingModel?.cost?.input ?? 0,
- output: model?.cost?.output ?? existingModel?.cost?.output ?? 0,
- cache: {
- read: model?.cost?.cache_read ?? existingModel?.cost?.cache.read ?? 0,
- write: model?.cost?.cache_write ?? existingModel?.cost?.cache.write ?? 0,
- },
- },
- options: mergeDeep(existingModel?.options ?? {}, model.options ?? {}),
- limit: {
- context: model.limit?.context ?? existingModel?.limit?.context ?? 0,
- output: model.limit?.output ?? existingModel?.limit?.output ?? 0,
- },
- headers: mergeDeep(existingModel?.headers ?? {}, model.headers ?? {}),
- family: model.family ?? existingModel?.family ?? "",
- release_date: model.release_date ?? existingModel?.release_date ?? "",
- variants: {},
- }
- const merged = mergeDeep(ProviderTransform.variants(parsedModel), model.variants ?? {})
- parsedModel.variants = mapValues(
- pickBy(merged, (v) => !v.disabled),
- (v) => omit(v, ["disabled"]),
- )
- parsed.models[modelID] = parsedModel
- }
- database[providerID] = parsed
- }
- // load env
- const env = Env.all()
- for (const [providerID, provider] of Object.entries(database)) {
- if (disabled.has(providerID)) continue
- const apiKey = provider.env.map((item) => env[item]).find(Boolean)
- if (!apiKey) continue
- mergeProvider(providerID, {
- source: "env",
- key: provider.env.length === 1 ? apiKey : undefined,
- })
- }
- // load API keys
- for (const [providerID, provider] of Object.entries(await Auth.all())) {
- if (disabled.has(providerID)) continue
- if (provider.type === "api") {
- mergeProvider(providerID, {
- source: "api",
- key: provider.key,
- })
- }
- }
- for (const plugin of await Plugin.list()) {
- if (!plugin.auth) continue
- const providerID = plugin.auth.provider
- if (disabled.has(providerID)) continue
- // For github-copilot plugin, check if auth exists for either github-copilot or github-copilot-enterprise
- let hasAuth = false
- const auth = await Auth.get(providerID)
- if (auth) hasAuth = true
- // Special handling for github-copilot: also check for enterprise auth
- if (providerID === "github-copilot" && !hasAuth) {
- const enterpriseAuth = await Auth.get("github-copilot-enterprise")
- if (enterpriseAuth) hasAuth = true
- }
- if (!hasAuth) continue
- if (!plugin.auth.loader) continue
- // Load for the main provider if auth exists
- if (auth) {
- const options = await plugin.auth.loader(() => Auth.get(providerID) as any, database[plugin.auth.provider])
- const opts = options ?? {}
- const patch: Partial<Info> = providers[providerID] ? { options: opts } : { source: "custom", options: opts }
- mergeProvider(providerID, patch)
- }
- // If this is github-copilot plugin, also register for github-copilot-enterprise if auth exists
- if (providerID === "github-copilot") {
- const enterpriseProviderID = "github-copilot-enterprise"
- if (!disabled.has(enterpriseProviderID)) {
- const enterpriseAuth = await Auth.get(enterpriseProviderID)
- if (enterpriseAuth) {
- const enterpriseOptions = await plugin.auth.loader(
- () => Auth.get(enterpriseProviderID) as any,
- database[enterpriseProviderID],
- )
- const opts = enterpriseOptions ?? {}
- const patch: Partial<Info> = providers[enterpriseProviderID]
- ? { options: opts }
- : { source: "custom", options: opts }
- mergeProvider(enterpriseProviderID, patch)
- }
- }
- }
- }
- for (const [providerID, fn] of Object.entries(CUSTOM_LOADERS)) {
- if (disabled.has(providerID)) continue
- const data = database[providerID]
- if (!data) {
- log.error("Provider does not exist in model list " + providerID)
- continue
- }
- const result = await fn(data)
- if (result && (result.autoload || providers[providerID])) {
- if (result.getModel) modelLoaders[providerID] = result.getModel
- const opts = result.options ?? {}
- const patch: Partial<Info> = providers[providerID] ? { options: opts } : { source: "custom", options: opts }
- mergeProvider(providerID, patch)
- }
- }
- // load config
- for (const [providerID, provider] of configProviders) {
- const partial: Partial<Info> = { source: "config" }
- if (provider.env) partial.env = provider.env
- if (provider.name) partial.name = provider.name
- if (provider.options) partial.options = provider.options
- mergeProvider(providerID, partial)
- }
- for (const [providerID, provider] of Object.entries(providers)) {
- if (!isProviderAllowed(providerID)) {
- delete providers[providerID]
- continue
- }
- const configProvider = config.provider?.[providerID]
- for (const [modelID, model] of Object.entries(provider.models)) {
- model.api.id = model.api.id ?? model.id ?? modelID
- if (modelID === "gpt-5-chat-latest" || (providerID === "openrouter" && modelID === "openai/gpt-5-chat"))
- delete provider.models[modelID]
- if (model.status === "alpha" && !Flag.OPENCODE_ENABLE_EXPERIMENTAL_MODELS) delete provider.models[modelID]
- if (model.status === "deprecated") delete provider.models[modelID]
- if (
- (configProvider?.blacklist && configProvider.blacklist.includes(modelID)) ||
- (configProvider?.whitelist && !configProvider.whitelist.includes(modelID))
- )
- delete provider.models[modelID]
- // Filter out disabled variants from config
- const configVariants = configProvider?.models?.[modelID]?.variants
- if (configVariants && model.variants) {
- const merged = mergeDeep(model.variants, configVariants)
- model.variants = mapValues(
- pickBy(merged, (v) => !v.disabled),
- (v) => omit(v, ["disabled"]),
- )
- }
- }
- if (Object.keys(provider.models).length === 0) {
- delete providers[providerID]
- continue
- }
- log.info("found", { providerID })
- }
- return {
- models: languages,
- providers,
- sdk,
- modelLoaders,
- }
- })
- export async function list() {
- return state().then((state) => state.providers)
- }
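- // Creates (and caches) the AI SDK provider instance for a model. Bundled packages
- // are constructed directly; anything else is installed on demand via BunProc and
- // its first "create*" export is called. Instances are cached by a hash of the
- // package name plus the resolved options.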
- async function getSDK(model: Model) {
- try {
- using _ = log.time("getSDK", {
- providerID: model.providerID,
- })
- const s = await state()
- const provider = s.providers[model.providerID]
- const options = { ...provider.options }
- if (model.api.npm.includes("@ai-sdk/openai-compatible") && options["includeUsage"] !== false) {
- options["includeUsage"] = true
- }
- if (!options["baseURL"]) options["baseURL"] = model.api.url
- if (options["apiKey"] === undefined && provider.key) options["apiKey"] = provider.key
- if (model.headers)
- options["headers"] = {
- ...options["headers"],
- ...model.headers,
- }
- const key = Bun.hash.xxHash32(JSON.stringify({ npm: model.api.npm, options }))
- const existing = s.sdk.get(key)
- if (existing) return existing
- const customFetch = options["fetch"]
- options["fetch"] = async (input: any, init?: BunFetchRequestInit) => {
- // Preserve any custom fetch (falling back to global fetch) and wrap it with timeout handling
- const fetchFn = customFetch ?? fetch
- const opts = init ?? {}
- if (options["timeout"] !== undefined && options["timeout"] !== null) {
- const signals: AbortSignal[] = []
- if (opts.signal) signals.push(opts.signal)
- if (options["timeout"] !== false) signals.push(AbortSignal.timeout(options["timeout"]))
- const combined = signals.length > 1 ? AbortSignal.any(signals) : signals[0]
- opts.signal = combined
- }
- // Strip OpenAI itemId metadata, following what Codex does
- // Codex uses #[serde(skip_serializing)] on id fields for all item types:
- // Message, Reasoning, FunctionCall, LocalShellCall, CustomToolCall, WebSearchCall
- // IDs are only re-attached for Azure with store=true
- if (model.api.npm === "@ai-sdk/openai" && opts.body && opts.method === "POST") {
- const body = JSON.parse(opts.body as string)
- const isAzure = model.providerID.includes("azure")
- const keepIds = isAzure && body.store === true
- if (!keepIds && Array.isArray(body.input)) {
- for (const item of body.input) {
- if ("id" in item) {
- delete item.id
- }
- }
- opts.body = JSON.stringify(body)
- }
- }
- return fetchFn(input, {
- ...opts,
- // @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682
- timeout: false,
- })
- }
- // Special case: google-vertex-anthropic uses a subpath import
- const bundledKey =
- model.providerID === "google-vertex-anthropic" ? "@ai-sdk/google-vertex/anthropic" : model.api.npm
- const bundledFn = BUNDLED_PROVIDERS[bundledKey]
- if (bundledFn) {
- log.info("using bundled provider", { providerID: model.providerID, pkg: bundledKey })
- const loaded = bundledFn({
- name: model.providerID,
- ...options,
- })
- s.sdk.set(key, loaded)
- return loaded as SDK
- }
- let installedPath: string
- if (!model.api.npm.startsWith("file://")) {
- installedPath = await BunProc.install(model.api.npm, "latest")
- } else {
- log.info("loading local provider", { pkg: model.api.npm })
- installedPath = model.api.npm
- }
- const mod = await import(installedPath)
- const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!]
- const loaded = fn({
- name: model.providerID,
- ...options,
- })
- s.sdk.set(key, loaded)
- return loaded as SDK
- } catch (e) {
- throw new InitError({ providerID: model.providerID }, { cause: e })
- }
- }
- export async function getProvider(providerID: string) {
- return state().then((s) => s.providers[providerID])
- }
- export async function getModel(providerID: string, modelID: string) {
- const s = await state()
- const provider = s.providers[providerID]
- if (!provider) {
- const availableProviders = Object.keys(s.providers)
- const matches = fuzzysort.go(providerID, availableProviders, { limit: 3, threshold: -10000 })
- const suggestions = matches.map((m) => m.target)
- throw new ModelNotFoundError({ providerID, modelID, suggestions })
- }
- const info = provider.models[modelID]
- if (!info) {
- const availableModels = Object.keys(provider.models)
- const matches = fuzzysort.go(modelID, availableModels, { limit: 3, threshold: -10000 })
- const suggestions = matches.map((m) => m.target)
- throw new ModelNotFoundError({ providerID, modelID, suggestions })
- }
- return info
- }
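- // Resolves the language model for a Model, preferring a provider-specific model
- // loader when one was registered (e.g. OpenAI's responses API) and falling back
- // to sdk.languageModel(); results are cached per provider/model pair.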
- export async function getLanguage(model: Model): Promise<LanguageModelV2> {
- const s = await state()
- const key = `${model.providerID}/${model.id}`
- if (s.models.has(key)) return s.models.get(key)!
- const provider = s.providers[model.providerID]
- const sdk = await getSDK(model)
- try {
- const language = s.modelLoaders[model.providerID]
- ? await s.modelLoaders[model.providerID](sdk, model.api.id, provider.options)
- : sdk.languageModel(model.api.id)
- s.models.set(key, language)
- return language
- } catch (e) {
- if (e instanceof NoSuchModelError)
- throw new ModelNotFoundError(
- {
- modelID: model.id,
- providerID: model.providerID,
- },
- { cause: e },
- )
- throw e
- }
- }
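- // Returns the first model whose ID contains one of the query substrings, checked
- // in query order; undefined if the provider is unknown or nothing matches.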
- export async function closest(providerID: string, query: string[]) {
- const s = await state()
- const provider = s.providers[providerID]
- if (!provider) return undefined
- for (const item of query) {
- for (const modelID of Object.keys(provider.models)) {
- if (modelID.includes(item))
- return {
- providerID,
- modelID,
- }
- }
- }
- }
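- // Picks a small/cheap model: an explicit small_model config wins, then the first
- // match from a per-provider priority list, then opencode's gpt-5-nano if available.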
- export async function getSmallModel(providerID: string) {
- const cfg = await Config.get()
- if (cfg.small_model) {
- const parsed = parseModel(cfg.small_model)
- return getModel(parsed.providerID, parsed.modelID)
- }
- const provider = await state().then((state) => state.providers[providerID])
- if (provider) {
- let priority = [
- "claude-haiku-4-5",
- "claude-haiku-4.5",
- "3-5-haiku",
- "3.5-haiku",
- "gemini-3-flash",
- "gemini-2.5-flash",
- "gpt-5-nano",
- ]
- if (providerID.startsWith("opencode")) {
- priority = ["gpt-5-nano"]
- }
- if (providerID.startsWith("github-copilot")) {
- // prioritize free models for github copilot
- priority = ["gpt-5-mini", "claude-haiku-4.5", ...priority]
- }
- for (const item of priority) {
- for (const model of Object.keys(provider.models)) {
- if (model.includes(item)) return getModel(providerID, model)
- }
- }
- }
- // Check if opencode provider is available before using it
- const opencodeProvider = await state().then((state) => state.providers["opencode"])
- if (opencodeProvider && opencodeProvider.models["gpt-5-nano"]) {
- return getModel("opencode", "gpt-5-nano")
- }
- return undefined
- }
- const priority = ["gpt-5", "claude-sonnet-4", "big-pickle", "gemini-3-pro"]
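- // Orders models for default selection: IDs matching the priority list rank ahead
- // of those that don't, "latest" variants are preferred, and reverse-alphabetical
- // ID order is the final tiebreak.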
- export function sort(models: Model[]) {
- return sortBy(
- models,
- [(model) => priority.findIndex((filter) => model.id.includes(filter)), "desc"],
- [(model) => (model.id.includes("latest") ? 0 : 1), "asc"],
- [(model) => model.id, "desc"],
- )
- }
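- // Default model selection: an explicit config.model wins; otherwise the first
- // enabled provider (restricted to those named in config.provider when set) and
- // its top-ranked model per sort().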
- export async function defaultModel() {
- const cfg = await Config.get()
- if (cfg.model) return parseModel(cfg.model)
- const provider = await list()
- .then((val) => Object.values(val))
- .then((x) => x.find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.id)))
- if (!provider) throw new Error("no providers found")
- const [model] = sort(Object.values(provider.models))
- if (!model) throw new Error("no models found")
- return {
- providerID: provider.id,
- modelID: model.id,
- }
- }
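- // Splits "provider/model" on the first "/" only, so model IDs that themselves
- // contain "/" survive intact, e.g. parseModel("openrouter/openai/gpt-5-chat")
- // → { providerID: "openrouter", modelID: "openai/gpt-5-chat" }.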
- export function parseModel(model: string) {
- const [providerID, ...rest] = model.split("/")
- return {
- providerID: providerID,
- modelID: rest.join("/"),
- }
- }
- export const ModelNotFoundError = NamedError.create(
- "ProviderModelNotFoundError",
- z.object({
- providerID: z.string(),
- modelID: z.string(),
- suggestions: z.array(z.string()).optional(),
- }),
- )
- export const InitError = NamedError.create(
- "ProviderInitError",
- z.object({
- providerID: z.string(),
- }),
- )
- }