diff --git a/main_bun.ts b/main_bun.ts
index d0c0f55..1049a43 100644
--- a/main_bun.ts
+++ b/main_bun.ts
@@ -1,4 +1,5 @@
 import { app } from "./src/app.ts"
+// @ts-ignore suppress warning
 Bun.serve({
   port: 8000,
   fetch: app.fetch,
diff --git a/main_deno.ts b/main_deno.ts
index b26cb41..7cf7de1 100644
--- a/main_deno.ts
+++ b/main_deno.ts
@@ -1,4 +1,4 @@
 import { app } from "./src/app.ts"
 
-// @ts-ignore supress idea warning
+// @ts-ignore suppress warning
 Deno.serve({ port: 8000 }, app.fetch)
diff --git a/src/app.ts b/src/app.ts
index 21ec1a2..67bd2cb 100644
--- a/src/app.ts
+++ b/src/app.ts
@@ -18,25 +18,20 @@ export const app = new Hono({ strict: true })
   .options("*", (c) => c.text("", 204))
   .get("/", (c) => {
     const origin = new URL(c.req.url).origin
-    return c.html(
-      `<pre>
-
+    return c.text(`
 Hello Gemini-OpenAI-Proxy from ${getRuntimeKey()}!
 
 You can try it with:
 
 curl ${origin}/v1/chat/completions \\
--H "Authorization: Bearer $YOUR_GEMINI_API_KEY" \\
--H "Content-Type: application/json" \\
--d '{
-"model": "gpt-3.5-turbo",
-"messages": [{"role": "user", "content": "Hello"}],
-"temperature": 0.7
-}'
-
-</pre>
-`,
-    )
+  -H "Authorization: Bearer $YOUR_GEMINI_API_KEY" \\
+  -H "Content-Type: application/json" \\
+  -d '{
+    "model": "gpt-3.5-turbo",
+    "messages": [{"role": "user", "content": "Hello"}],
+    "temperature": 0.7
+  }'
+`)
   })
   .post("/v1/chat/completions", chatProxyHandler)
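
Note (not part of the patch): a minimal, hypothetical smoke test for the reworked "/" route, assuming Hono's app.request test helper and a runtime with top-level await such as Deno or Bun:

    import { app } from "./src/app.ts"

    // The route now returns plain text instead of HTML, so assert on the body text.
    const res = await app.request("/")
    console.assert(res.status === 200)
    const body = await res.text()
    console.assert(body.includes("Hello Gemini-OpenAI-Proxy"))
    console.assert(body.includes("curl"))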