Commit

update
zuisong committed Jan 12, 2024
1 parent ab0608b commit 76c9aeb
Showing 7 changed files with 33 additions and 30 deletions.
13 changes: 5 additions & 8 deletions src/app.ts
@@ -3,22 +3,19 @@ import { cors } from "hono/cors"
 import { getRuntimeKey } from "hono/adapter"
 import { logger } from "hono/logger"
 import { timing } from "hono/timing"
-import { chatProxyHandler } from "./chat/complete/ChatProxyHandler.ts"
+import { chatProxyHandler } from "./v1/chat/completions/ChatProxyHandler.ts"
 import { Logger, gen_logger } from "./log.ts"
 
-const openAiRoute = new Hono<{ Variables: { log: Logger } }>()
-  .use("*", async (c, next) => {
+export const app = new Hono({ strict: true })
+  .use("*", cors(), timing(), logger())
+  .use("*", async (c: ContextWithLogger, next) => {
     const logger = gen_logger(crypto.randomUUID())
     c.set("log", logger)
     await next()
     c.set("log", undefined)
   })
-  .post("/v1/chat/completions", chatProxyHandler)
 
-export const app = new Hono({ strict: true })
-  .use("*", cors(), timing(), logger())
-  .options("*", (c) => c.text("", 204))
-  .route("/", openAiRoute)
   .get("/", (c) => c.text(`Hello Gemini-OpenAI-Proxy from ${getRuntimeKey()}!`))
+  .post("/v1/chat/completions", chatProxyHandler)
 
 export type ContextWithLogger = Context<{ Variables: { log: Logger } }>
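Note: the refactor drops the openAiRoute sub-router (and its .route("/") mount) in favor of one chained Hono instance with a typed logging middleware. A minimal, self-contained sketch of the same pattern, where the simplified log type stands in for the project's Logger:

import { Hono } from "hono"

type Env = { Variables: { log: (msg: string) => void } }

const app = new Hono<Env>({ strict: true })
  .use("*", async (c, next) => {
    const id = crypto.randomUUID() // one id per request, as in the commit
    c.set("log", (msg) => console.log(`${id} ${msg}`))
    await next()
  })
  .get("/", (c) => {
    c.var.log("handling /") // typed access; equivalent to c.get("log")
    return c.text("ok")
  })

export default app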
4 changes: 2 additions & 2 deletions src/log.ts
@@ -11,14 +11,14 @@ const currentlevel = LogLevel.debug
 export function gen_logger(id: string) {
   return mapValues(LogLevel, (value, name) => {
     return (msg: Any) => {
-      out_func(name, value, `${id} ${msg}`)
+      outFunc(name, value, `${id} ${msg}`)
     }
   })
 }
 
 export type Logger = ReturnType<typeof gen_logger>
 
-function out_func(levelName: string, levelValue: number, msg: string) {
+function outFunc(levelName: string, levelValue: number, msg: string) {
   if (levelValue > currentlevel) {
     return
   }
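Note: a self-contained sketch of the pattern gen_logger/outFunc implement — one method per level, each prefixing messages with a request id. The numeric LogLevel values below are assumptions (the real map sits above the visible hunk); only the skip-when-above-threshold comparison is shown in the diff:

const LogLevel = { error: 0, warn: 1, info: 2, debug: 3 } as const // assumed severities
const currentlevel = LogLevel.debug // most verbose: nothing gets skipped

function genLogger(id: string) {
  const out = {} as Record<keyof typeof LogLevel, (msg: string) => void>
  for (const [name, value] of Object.entries(LogLevel)) {
    out[name as keyof typeof LogLevel] = (msg) => {
      if (value > currentlevel) return // drop messages above the threshold
      console.log(`[${name}] ${id} ${msg}`)
    }
  }
  return out
}

const log = genLogger(crypto.randomUUID())
log.debug("request received") // "[debug] <uuid> request received"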
16 changes: 10 additions & 6 deletions src/utils.ts
@@ -24,9 +24,9 @@ function parseBase64(base64: string): Part {
   }
 }
 
-export function openAIMessageToGeminiMessage(
-  messages: Array<OpenAI.Chat.ChatCompletionMessageParam>,
-): Array<Content> {
+export function openAiMessageToGeminiMessage(
+  messages: OpenAI.Chat.ChatCompletionMessageParam[],
+): Content[] {
   const result: Content[] = messages.flatMap(({ role, content }) => {
     if (role === "system") {
       return [
@@ -51,12 +51,16 @@ export function openAIMessageToGeminiMessage(
 }
 
 function hasImageMessage(
-  messages: Array<OpenAI.Chat.ChatCompletionMessageParam>,
+  messages: OpenAI.Chat.ChatCompletionMessageParam[],
 ): boolean {
   return messages.some((msg) => {
     const content = msg.content
-    if (content == null) return false
-    if (typeof content === "string") return false
+    if (content == null) {
+      return false
+    }
+    if (typeof content === "string") {
+      return false
+    }
     return content.some((it) => it.type === "image_url")
   })
 }
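Note: callers only see the rename. A hedged usage sketch — the import path assumes a caller next to src/utils.ts, and the Gemini output shape in the comment is abbreviated:

import type { OpenAI } from "openai"
import { openAiMessageToGeminiMessage } from "./utils.ts"

const messages: OpenAI.Chat.ChatCompletionMessageParam[] = [
  { role: "system", content: "You are a terse assistant." },
  { role: "user", content: "Hello!" },
]

// Gemini has no system role, so system prompts are folded into the
// user/model turns; the result is Gemini-style Content[], roughly
// [{ role: "user", parts: [{ text: "..." }] }, ...]
const contents = openAiMessageToGeminiMessage(messages)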
6 changes: 3 additions & 3 deletions src/v1/chat/completions/ChatProxyHandler.ts
@@ -1,13 +1,13 @@
 import { GoogleGenerativeAI } from "@google/generative-ai"
 import type { Handler } from "hono"
 import type { OpenAI } from "openai"
-import { getToken } from "../../utils.ts"
+import { getToken } from "../../../utils.ts"
 import { nonStreamingChatProxyHandler } from "./NonStreamingChatProxyHandler.ts"
 import { streamingChatProxyHandler } from "./StreamingChatProxyHandler.ts"
-import { ContextWithLogger } from "../../app.ts"
+import { ContextWithLogger } from "../../../app.ts"
 
 export const chatProxyHandler: Handler = async (c: ContextWithLogger) => {
-  const log = c.get("log")
+  const log = c.var.log
 
   const req = await c.req.json<OpenAI.Chat.ChatCompletionCreateParams>()
   log.debug(JSON.stringify(req))
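Note: the c.get("log") → c.var.log change (repeated in the handlers below) is cosmetic. c.var is Hono's typed property accessor over the same context variables that c.set/c.get use:

import { Hono } from "hono"

const app = new Hono<{ Variables: { log: (msg: string) => void } }>()
  .use("*", async (c, next) => {
    c.set("log", console.log)
    await next()
  })
  .get("/", (c) => {
    c.get("log")("via getter") // classic accessor
    c.var.log("via c.var")     // same value, with types flowing from Variables
    return c.text("ok")
  })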
6 changes: 3 additions & 3 deletions src/v1/chat/completions/NonStreamingChatProxyHandler.ts
@@ -1,17 +1,17 @@
 import type { OpenAI } from "openai"
-import { genModel, openAIMessageToGeminiMessage } from "../../utils.ts"
+import { genModel, openAiMessageToGeminiMessage } from "../../../utils.ts"
 import { ChatProxyHandlerType } from "./ChatProxyHandler.ts"
 
 export const nonStreamingChatProxyHandler: ChatProxyHandlerType = async (
   c,
   req,
   genAi,
 ) => {
-  const log = c.get("log")
+  const log = c.var.log
   const model = genModel(genAi, req)
   const geminiResp: string = await model
     .generateContent({
-      contents: openAIMessageToGeminiMessage(req.messages),
+      contents: openAiMessageToGeminiMessage(req.messages),
     })
     .then((it) => it.response.text())
     .catch((err) => err?.message ?? err.toString())
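Note: the response assembly is below the visible hunk; for orientation only, the OpenAI-compatible body this handler would need to produce looks roughly like the sketch below. Field values are assumptions extrapolated from the chunk builder in the streaming handler, not the commit's actual code:

const geminiResp = "..." // text (or error message) resolved above

const body = {
  id: "chatcmpl-abc123", // placeholder id, mirroring the streaming handler
  object: "chat.completion",
  created: Math.floor(Date.now() / 1000),
  model: "gpt-3.5-turbo", // assumed placeholder model name
  choices: [
    {
      index: 0,
      message: { role: "assistant", content: geminiResp },
      finish_reason: "stop",
    },
  ],
}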
14 changes: 7 additions & 7 deletions src/v1/chat/completions/StreamingChatProxyHandler.ts
@@ -1,17 +1,17 @@
 import type { OpenAI } from "openai"
 import { streamSSE } from "hono/streaming"
-import { genModel, openAIMessageToGeminiMessage } from "../../utils.ts"
+import { genModel, openAiMessageToGeminiMessage } from "../../../utils.ts"
 import { ChatProxyHandlerType } from "./ChatProxyHandler.ts"
 
 export const streamingChatProxyHandler: ChatProxyHandlerType = async (
   c,
   req,
   genAi,
 ) => {
-  const log = c.get("log")
+  const log = c.var.log
   const model = genModel(genAi, req)
 
-  const genOpenAIResp = (content: string, stop: boolean) =>
+  const genOpenAiResp = (content: string, stop: boolean) =>
     ({
       id: "chatcmpl-abc123",
       object: "chat.completion.chunk",
@@ -29,23 +29,23 @@ export const streamingChatProxyHandler: ChatProxyHandlerType = async (
   return streamSSE(c, async (sseStream) => {
     await model
       .generateContentStream({
-        contents: openAIMessageToGeminiMessage(req.messages),
+        contents: openAiMessageToGeminiMessage(req.messages),
       })
       .then(async ({ stream, response }) => {
         for await (const { text } of stream) {
           await sseStream.writeSSE({
-            data: JSON.stringify(genOpenAIResp(text(), false)),
+            data: JSON.stringify(genOpenAiResp(text(), false)),
           })
         }
         await sseStream.writeSSE({
-          data: JSON.stringify(genOpenAIResp("", true)),
+          data: JSON.stringify(genOpenAiResp("", true)),
         })
         const geminiResult = (await response).text()
         log.info(JSON.stringify(geminiResult))
       })
       .catch(async (e) => {
         await sseStream.writeSSE({
-          data: JSON.stringify(genOpenAIResp(e.toString(), true)),
+          data: JSON.stringify(genOpenAiResp(e.toString(), true)),
         })
         log.info(e)
       })
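Note: because each SSE event is an OpenAI-style chat.completion.chunk, the official openai client can consume the stream by pointing baseURL at the proxy. A hedged sketch — the URL, port, and how the key reaches getToken on the proxy side are assumptions:

import OpenAI from "openai"

const client = new OpenAI({
  baseURL: "http://localhost:8000/v1", // wherever the proxy is deployed
  apiKey: "<gemini-api-key>",          // presumably forwarded to Gemini via getToken
})

const stream = await client.chat.completions.create({
  model: "gpt-3.5-turbo",
  messages: [{ role: "user", content: "Say hi" }],
  stream: true,
})

let text = ""
for await (const chunk of stream) {
  // Each chunk is one genOpenAiResp payload; delta.content carries the text.
  text += chunk.choices[0]?.delta?.content ?? ""
}
console.log(text)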
4 changes: 3 additions & 1 deletion test/chat-completion_test.ts
@@ -27,5 +27,7 @@ test("test", async () => {
 })
 
 test("open ai client test", () => {
-  if (typeof globalThis.Deno === "undefined") return
+  if (typeof globalThis.Deno === "undefined") {
+    return
+  }
 })
