2 changes: 1 addition & 1 deletion .dev.vars.example
@@ -11,7 +11,7 @@ GCP_SERVICE_ACCOUNT={"access_token":"ya29.a0AS3H6Nx...","refresh_token":"1//09Ft
# When set, clients must include "Authorization: Bearer <your-api-key>" header
# Example: sk-1234567890abcdef1234567890abcdef
OPENAI_API_KEY=sk-your-secret-api-key-here

# Optional: Map each client API key to its own Google profile (GEMINI_PROJECT_ID + GCP_SERVICE_ACCOUNT)
GEMINI_PROJECT_MAP={"sk-key-0": {"GEMINI_PROJECT_ID": "xxxx","GCP_SERVICE_ACCOUNT":{"access_token":"ya29.a0AS3H6Nx...","refresh_token":"1//09FtpJYpxOd...","scope":"https://www.googleapis.com/auth/cloud-platform ...","token_type":"Bearer","id_token":"eyJhbGciOiJSUzI1NiIs...","expiry_date":1750927763467}}, "sk-key-1": {"GEMINI_PROJECT_ID": "xxxx","GCP_SERVICE_ACCOUNT":{"access_token":"ya29.a0AS3H6Nx...","refresh_token":"1//09FtpJYpxOd...","scope":"https://www.googleapis.com/auth/cloud-platform ...","token_type":"Bearer","id_token":"eyJhbGciOiJSUzI1NiIs...","expiry_date":1750927763467}}}
# Optional: Enable fake thinking output for thinking models (set to "true" to enable)
# When enabled, models marked with thinking: true will generate synthetic reasoning text
# before providing their actual response, similar to OpenAI's o3 model behavior
3 changes: 2 additions & 1 deletion README.md
@@ -19,6 +19,7 @@ Transform Google's Gemini models into OpenAI-compatible endpoints using Cloudfla
- 🆓 **Free Tier Access** - Leverage Google's free tier through Code Assist API
- 📡 **Real-time Streaming** - Server-sent events for live responses with token usage
- 🎭 **Multiple Models** - Access to latest Gemini models including experimental ones
- 🧑🏻‍🤝‍🧑🏻 **Multiple Google Profiles** - Switch Google profiles based on the provided OPENAI_API_KEY

## 🤖 Supported Models

@@ -161,7 +162,7 @@ npm run dev
| `GCP_SERVICE_ACCOUNT` | ✅ | OAuth2 credentials JSON string. |
| `GEMINI_PROJECT_ID` | ❌ | Google Cloud Project ID (auto-discovered if not set). |
| `OPENAI_API_KEY` | ❌ | API key for authentication. If not set, the API is public. |
| `GEMINI_PROJECT_MAP` | ❌ | JSON map of API keys to OAuth2 profiles for multiple Google accounts (see the sketch below). |
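`GEMINI_PROJECT_MAP` maps each client API key to the Google profile the worker should use for that key. A minimal sketch of the expected shape, using placeholder keys and values that mirror `.dev.vars.example` (this is illustration only, not the worker's own code):

```typescript
// Illustrative shape of GEMINI_PROJECT_MAP (placeholder values only).
// Each client API key maps to the Google profile used for that key.
interface GeminiProfile {
  GEMINI_PROJECT_ID?: string;                    // Google Cloud project for this key
  GCP_SERVICE_ACCOUNT?: Record<string, unknown>; // OAuth2 credentials object
}

const projectMap: Record<string, GeminiProfile> = {
  "sk-key-0": {
    GEMINI_PROJECT_ID: "xxxx",
    GCP_SERVICE_ACCOUNT: { refresh_token: "1//09FtpJYpxOd...", token_type: "Bearer" },
  },
  "sk-key-1": {
    GEMINI_PROJECT_ID: "xxxx",
    GCP_SERVICE_ACCOUNT: { refresh_token: "1//09FtpJYpxOd...", token_type: "Bearer" },
  },
};

// The environment variable holds the JSON-stringified form of this object:
// GEMINI_PROJECT_MAP=JSON.stringify(projectMap)
```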
#### Thinking & Reasoning

| Variable | Description |
44 changes: 42 additions & 2 deletions src/middlewares/auth.ts
@@ -1,6 +1,8 @@
import { MiddlewareHandler } from "hono";
import { Env } from "../types";

import { KV_TOKEN_KEY } from "../config";
/**
* Middleware to enforce OpenAI-style API key authentication if OPENAI_API_KEY is set in the environment.
* Checks for 'Authorization: Bearer <key>' header on protected routes.
@@ -46,7 +48,45 @@ export const openAIApiKeyAuth: MiddlewareHandler<{ Bindings: Env }> = async (c,
}

const providedKey = match[1];
if (providedKey !== c.env.OPENAI_API_KEY) {
// GEMINI_PROJECT_MAP may exceed Cloudflare's secret size limit,
// so read the map from KV storage first and fall back to the env var.
let authMap = (await c.env.GEMINI_CLI_KV.get("GEMINI_PROJECT_MAP", "json")) as Record<string, any> | null;
if (!authMap) {
  authMap = JSON.parse(c.env.GEMINI_PROJECT_MAP || "{}") as Record<string, any>;
  try {
    await c.env.GEMINI_CLI_KV.put("GEMINI_PROJECT_MAP", JSON.stringify(authMap));
    console.log("Saved GEMINI_PROJECT_MAP to KV storage");
  } catch (kvError) {
    console.log(`Failed to save GEMINI_PROJECT_MAP to KV storage: ${kvError}`);
  }
}

// The configured OPENAI_API_KEY keeps using the credentials already in the environment.
if (providedKey === c.env.OPENAI_API_KEY) {
  return next();
}

if (Object.prototype.hasOwnProperty.call(authMap, providedKey)) {
  // Swap in the Google profile mapped to this API key for the rest of the request.
  const provider = authMap[providedKey];
  if (provider.GCP_SERVICE_ACCOUNT) {
    c.env.GCP_SERVICE_ACCOUNT = JSON.stringify(provider.GCP_SERVICE_ACCOUNT);
  }
  if (provider.GEMINI_PROJECT_ID) {
    c.env.GEMINI_PROJECT_ID = provider.GEMINI_PROJECT_ID;
  }

  // Switching to a different profile invalidates any previously cached OAuth token.
  try {
    await c.env.GEMINI_CLI_KV.delete(KV_TOKEN_KEY);
    console.log("Cleared cached token from KV storage");
  } catch (kvError) {
    console.log("Error clearing KV cache:", kvError);
  }

  c.env.OPENAI_API_KEY = providedKey;
} else {
return c.json(
{
error: {
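For illustration, a client selects a Google profile simply by sending a mapped key in the `Authorization` header; a key that is neither the configured `OPENAI_API_KEY` nor present in the map is rejected. A minimal sketch, assuming a hypothetical worker URL, the `sk-key-0` entry from the example map, a placeholder model name, and the OpenAI-compatible chat completions path:

```typescript
// Hypothetical client call: the bearer key selects which Google profile
// (GCP_SERVICE_ACCOUNT / GEMINI_PROJECT_ID) the middleware switches to.
const response = await fetch("https://your-worker.example.com/v1/chat/completions", {
  method: "POST",
  headers: {
    "Authorization": "Bearer sk-key-0", // a key present in GEMINI_PROJECT_MAP
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    model: "gemini-2.5-pro", // placeholder model name
    messages: [{ role: "user", content: "Hello" }],
  }),
});
console.log(await response.json());
```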
1 change: 1 addition & 0 deletions src/types.ts
@@ -13,6 +13,7 @@ export interface Env {
GCP_SERVICE_ACCOUNT: string; // Now contains OAuth2 credentials JSON
GEMINI_PROJECT_ID?: string;
GEMINI_CLI_KV: KVNamespace; // Cloudflare KV for token caching
GEMINI_PROJECT_MAP?: string; // Optional JSON map of API keys to auth profiles (multiple Google accounts)
OPENAI_API_KEY?: string; // Optional API key for authentication
ENABLE_FAKE_THINKING?: string; // Optional flag to enable fake thinking output (set to "true" to enable)
ENABLE_REAL_THINKING?: string; // Optional flag to enable real Gemini thinking output (set to "true" to enable)