Frank 3 месяцев назад
Родитель
Commit
7283bfa480

+ 12 - 5
packages/console/app/src/routes/zen/util/handler.ts

@@ -15,6 +15,7 @@ import { logger } from "./logger"
 import { AuthError, CreditsError, MonthlyLimitError, UserLimitError, ModelError, RateLimitError } from "./error"
 import { createBodyConverter, createStreamPartConverter, createResponseConverter } from "./provider/provider"
 import { anthropicHelper } from "./provider/anthropic"
+import { googleHelper } from "./provider/google"
 import { openaiHelper } from "./provider/openai"
 import { oaCompatHelper } from "./provider/openai-compatible"
 import { createRateLimiter } from "./rateLimiter"
@@ -30,6 +31,8 @@ export async function handler(
   opts: {
     format: ZenData.Format
     parseApiKey: (headers: Headers) => string | undefined
+    parseModel: (url: string, body: any) => string
+    parseIsStream: (url: string, body: any) => boolean
   },
 ) {
   type AuthInfo = Awaited<ReturnType<typeof authenticate>>
@@ -43,15 +46,18 @@ export async function handler(
   ]
 
   try {
+    const url = input.request.url
     const body = await input.request.json()
     const ip = input.request.headers.get("x-real-ip") ?? ""
+    const model = opts.parseModel(url, body)
+    const isStream = opts.parseIsStream(url, body)
     logger.metric({
-      is_tream: !!body.stream,
+      is_tream: isStream,
       session: input.request.headers.get("x-opencode-session"),
       request: input.request.headers.get("x-opencode-request"),
     })
     const zenData = ZenData.list()
-    const modelInfo = validateModel(zenData, body.model)
+    const modelInfo = validateModel(zenData, model)
     const rateLimiter = createRateLimiter(modelInfo.id, modelInfo.rateLimit, ip)
     await rateLimiter?.check()
 
@@ -64,7 +70,7 @@ export async function handler(
       logger.metric({ provider: providerInfo.id })
 
       const startTimestamp = Date.now()
-      const reqUrl = providerInfo.modifyUrl(providerInfo.api)
+      const reqUrl = providerInfo.modifyUrl(providerInfo.api, providerInfo.model, isStream)
       const reqBody = JSON.stringify(
         providerInfo.modifyBody({
           ...createBodyConverter(opts.format, providerInfo.format)(body),
@@ -114,7 +120,7 @@ export async function handler(
     logger.debug("STATUS: " + res.status + " " + res.statusText)
 
     // Handle non-streaming response
-    if (!body.stream) {
+    if (!isStream) {
       const responseConverter = createResponseConverter(providerInfo.format, opts.format)
       const json = await res.json()
       const body = JSON.stringify(responseConverter(json))
@@ -169,7 +175,7 @@ export async function handler(
               responseLength += value.length
               buffer += decoder.decode(value, { stream: true })
 
-              const parts = buffer.split("\n\n")
+              const parts = buffer.split(providerInfo.streamSeparator)
               buffer = parts.pop() ?? ""
 
               for (let part of parts) {
@@ -283,6 +289,7 @@ export async function handler(
       ...(() => {
         const format = zenData.providers[provider.id].format
         if (format === "anthropic") return anthropicHelper
+        if (format === "google") return googleHelper
         if (format === "openai") return openaiHelper
         return oaCompatHelper
       })(),

+ 1 - 0
packages/console/app/src/routes/zen/util/provider/anthropic.ts

@@ -30,6 +30,7 @@ export const anthropicHelper = {
       service_tier: "standard_only",
     }
   },
+  streamSeparator: "\n\n",
   createUsageParser: () => {
     let usage: Usage
 

+ 74 - 0
packages/console/app/src/routes/zen/util/provider/google.ts

@@ -0,0 +1,74 @@
+import { ProviderHelper } from "./provider"
+
+/*
+{
+  promptTokenCount: 11453,
+  candidatesTokenCount: 71,
+  totalTokenCount: 11625,
+  cachedContentTokenCount: 8100,
+  promptTokensDetails: [
+    {modality: "TEXT",tokenCount: 11453}
+  ],
+  cacheTokensDetails: [
+    {modality: "TEXT",tokenCount: 8100}
+  ],
+  thoughtsTokenCount: 101
+}
+*/
+
+type Usage = {
+  promptTokenCount?: number
+  candidatesTokenCount?: number
+  totalTokenCount?: number
+  cachedContentTokenCount?: number
+  promptTokensDetails?: { modality: string; tokenCount: number }[]
+  cacheTokensDetails?: { modality: string; tokenCount: number }[]
+  thoughtsTokenCount?: number
+}
+
+export const googleHelper = {
+  format: "google",
+  modifyUrl: (providerApi: string, model?: string, isStream?: boolean) =>
+    `${providerApi}/models/${model}:${isStream ? "streamGenerateContent?alt=sse" : "generateContent"}`,
+  modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => {
+    headers.set("x-goog-api-key", apiKey)
+  },
+  modifyBody: (body: Record<string, any>) => {
+    return body
+  },
+  streamSeparator: "\r\n\r\n",
+  createUsageParser: () => {
+    let usage: Usage
+
+    return {
+      parse: (chunk: string) => {
+        if (!chunk.startsWith("data: ")) return
+
+        let json
+        try {
+          json = JSON.parse(chunk.slice(6)) as { usageMetadata?: Usage }
+        } catch (e) {
+          return
+        }
+
+        if (!json.usageMetadata) return
+        usage = json.usageMetadata
+      },
+      retrieve: () => usage,
+    }
+  },
+  normalizeUsage: (usage: Usage) => {
+    const inputTokens = usage.promptTokenCount ?? 0
+    const outputTokens = usage.candidatesTokenCount ?? 0
+    const reasoningTokens = usage.thoughtsTokenCount ?? 0
+    const cacheReadTokens = usage.cachedContentTokenCount ?? 0
+    return {
+      inputTokens: inputTokens - cacheReadTokens,
+      outputTokens,
+      reasoningTokens,
+      cacheReadTokens,
+      cacheWrite5mTokens: undefined,
+      cacheWrite1hTokens: undefined,
+    }
+  },
+} satisfies ProviderHelper

+ 1 - 0
packages/console/app/src/routes/zen/util/provider/openai-compatible.ts

@@ -33,6 +33,7 @@ export const oaCompatHelper = {
       ...(body.stream ? { stream_options: { include_usage: true } } : {}),
     }
   },
+  streamSeparator: "\n\n",
   createUsageParser: () => {
     let usage: Usage
 

+ 1 - 0
packages/console/app/src/routes/zen/util/provider/openai.ts

@@ -21,6 +21,7 @@ export const openaiHelper = {
   modifyBody: (body: Record<string, any>) => {
     return body
   },
+  streamSeparator: "\n\n",
   createUsageParser: () => {
     let usage: Usage
 

+ 2 - 1
packages/console/app/src/routes/zen/util/provider/provider.ts

@@ -26,9 +26,10 @@ import {
 
 export type ProviderHelper = {
   format: ZenData.Format
-  modifyUrl: (providerApi: string) => string
+  modifyUrl: (providerApi: string, model?: string, isStream?: boolean) => string
   modifyHeaders: (headers: Headers, body: Record<string, any>, apiKey: string) => void
   modifyBody: (body: Record<string, any>) => Record<string, any>
+  streamSeparator: string
   createUsageParser: () => {
     parse: (chunk: string) => void
     retrieve: () => any

+ 2 - 0
packages/console/app/src/routes/zen/v1/chat/completions.ts

@@ -5,5 +5,7 @@ export function POST(input: APIEvent) {
   return handler(input, {
     format: "oa-compat",
     parseApiKey: (headers: Headers) => headers.get("authorization")?.split(" ")[1],
+    parseModel: (url: string, body: any) => body.model,
+    parseIsStream: (url: string, body: any) => !!body.stream,
   })
 }

+ 2 - 0
packages/console/app/src/routes/zen/v1/messages.ts

@@ -5,5 +5,7 @@ export function POST(input: APIEvent) {
   return handler(input, {
     format: "anthropic",
     parseApiKey: (headers: Headers) => headers.get("x-api-key") ?? undefined,
+    parseModel: (url: string, body: any) => body.model,
+    parseIsStream: (url: string, body: any) => !!body.stream,
   })
 }

+ 13 - 0
packages/console/app/src/routes/zen/v1/models/[model].ts

@@ -0,0 +1,13 @@
+import type { APIEvent } from "@solidjs/start/server"
+import { handler } from "~/routes/zen/util/handler"
+
+export function POST(input: APIEvent) {
+  return handler(input, {
+    format: "google",
+    parseApiKey: (headers: Headers) => headers.get("x-goog-api-key") ?? undefined,
+    parseModel: (url: string, body: any) => url.split("/").pop()?.split(":")?.[0] ?? "",
+    parseIsStream: (url: string, body: any) =>
+      // ie. url: https://opencode.ai/zen/v1/models/gemini-3-pro:streamGenerateContent?alt=sse'
+      url.split("/").pop()?.split(":")?.[1]?.startsWith("streamGenerateContent") ?? false,
+  })
+}

+ 2 - 0
packages/console/app/src/routes/zen/v1/responses.ts

@@ -5,5 +5,7 @@ export function POST(input: APIEvent) {
   return handler(input, {
     format: "openai",
     parseApiKey: (headers: Headers) => headers.get("authorization")?.split(" ")[1],
+    parseModel: (url: string, body: any) => body.model,
+    parseIsStream: (url: string, body: any) => !!body.stream,
   })
 }

+ 1 - 1
packages/console/core/src/model.ts

@@ -8,7 +8,7 @@ import { Actor } from "./actor"
 import { Resource } from "@opencode-ai/console-resource"
 
 export namespace ZenData {
-  const FormatSchema = z.enum(["anthropic", "openai", "oa-compat"])
+  const FormatSchema = z.enum(["anthropic", "google", "openai", "oa-compat"])
   export type Format = z.infer<typeof FormatSchema>
 
   const ModelCostSchema = z.object({