Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions ProxyIntro.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@

# cf-openai-gemini-proxy.js
|chatgpt3.5 feature|support?|
|---|---|
|single-turn conversation|yes|
|multi-turn conversation|yes|
|streamed content|yes|
|embeddings|yes|
|function call|no|
|multimodal input|no|

# cf-openai-qwen-proxy.js

|chatgpt3.5 feature|support?|
|---|---|
|single-turn conversation|yes|
|multi-turn conversation|yes|
|streamed content|yes|
|embeddings|no|
|function call|no|
|multimodal input|no|
294 changes: 294 additions & 0 deletions cf-openai-gemini-proxy.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,294 @@
// Gemini model deployments backing the OpenAI-style routes below.
// The deployment name you chose when you deployed the model.
const chatmodel = 'gemini-pro'; // used for /v1/chat/completions and /v1/completions
const embeddmodel = 'embedding-001'; // used for /v1/embeddings

// Cloudflare Workers entry point: every incoming request is routed
// through handleRequest().
addEventListener("fetch", (event) => {
event.respondWith(handleRequest(event.request));
});

/**
 * Top-level router: dispatch each request to the handler pair
 * (request transformer, response transformer) for its path.
 *
 * @param {Request} request - incoming Worker request.
 * @returns {Promise<Response>} proxied or error response.
 */
async function handleRequest(request) {
  // CORS preflight is answered directly, without touching Gemini.
  if (request.method === 'OPTIONS') {
    return handleOPTIONS(request);
  }

  const { pathname } = new URL(request.url);
  switch (pathname) {
    case '/v1/chat/completions':
    case '/v1/completions':
      return handleRequestWithTransform(request, transformCommonRequest, transformCommonResponse);
    case '/v1/embeddings':
      return handleRequestWithTransform(request, transformEmbeddingRequest, transformEmbeddingResponse);
    default:
      return new Response('404 Not Found for ' + pathname, { status: 404 });
  }
}

/**
 * Map an OpenAI-style path onto the matching Gemini REST endpoint URL.
 *
 * @param {Request} request - incoming request; its pathname selects the
 *   Gemini action/model, and its Authorization header carries the API key.
 * @returns {string|null} full Gemini URL, or null when the path is unknown
 *   or the Authorization header is missing.
 */
function transformURL(request) {
  const url = new URL(request.url);

  // pathname -> [Gemini action, model deployment]
  const routes = {
    '/v1/chat/completions': ['generateContent', chatmodel],
    '/v1/completions': ['generateContent', chatmodel],
    '/v1/embeddings': ['embedContent', embeddmodel],
  };
  const route = routes[url.pathname];
  if (route === undefined) {
    return null;
  }
  const [path, deployName] = route;

  // Fix: the original called .replace() on a null header when Authorization
  // was absent, crashing the Worker with a TypeError. Treat a missing key
  // as an unroutable request instead.
  const authKey = request.headers.get('Authorization');
  if (!authKey) {
    return null;
  }
  const apiKey = authKey.replace('Bearer ', '');
  return `https://generativelanguage.googleapis.com/v1/models/${deployName}:${path}?key=${apiKey}`;
}

/**
 * Generic proxy pipeline: translate the OpenAI-shaped request body,
 * forward it to Gemini, translate the reply back, and (optionally)
 * replay it to the client as a server-sent-event stream.
 *
 * @param {Request} request - original client request (JSON body).
 * @param {function} transformRequestBody - OpenAI body -> Gemini body.
 * @param {function} transformResponseBody - Gemini body -> OpenAI body.
 * @returns {Promise<Response>} JSON or SSE response with CORS headers.
 */
async function handleRequestWithTransform(request, transformRequestBody, transformResponseBody) {
  const body = await request.json();

  const fetchAPI = transformURL(request);
  if (fetchAPI === null) {
    return new Response('404 Not Found', { status: 404 });
  }

  const upstream = await fetch(fetchAPI, {
    method: request.method,
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(transformRequestBody(body)),
  });
  if (!upstream.ok) {
    return new Response(upstream.statusText, { status: upstream.status });
  }

  const transformedResponse = transformResponseBody(await upstream.json());

  const corsHeaders = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': '*',
    'Access-Control-Allow-Headers': '*',
  };

  // Loose `!=` kept on purpose: it mirrors the original's acceptance of
  // truthy-ish stream flags (true, 1, "1").
  if (body?.stream != true) {
    return new Response(JSON.stringify(transformedResponse), {
      headers: { 'Content-Type': 'application/json', ...corsHeaders },
    });
  }

  // Client asked for SSE: replay the already-complete reply as a fake stream.
  const { readable, writable } = new TransformStream();
  streamResponse(transformedResponse, writable);
  return new Response(readable, {
    headers: { 'Content-Type': 'text/event-stream', ...corsHeaders },
  });
}


// Gemini does not support function calling yet, so this helper is currently unused.
// Converts OpenAI `tools` definitions into Gemini `functionDeclarations`.
function convert2GeminiFunctionDeclaration(tools, tool_choice) {
  if (tools === undefined || tool_choice === undefined || tool_choice === "none") {
    return [];
  }

  // TODO - add support for tool_choice

  const functionDeclarations = tools
    .filter((tool) => tool.type === "function")
    .map((tool) => ({
      name: tool.function.name,
      description: tool.function.description,
      parameters: tool.function.parameters,
    }));

  // Gemini expects declarations wrapped in a single tool object.
  return functionDeclarations.length > 0 ? [{ functionDeclarations }] : [];
}

/**
 * Convert an OpenAI /v1/embeddings request body into a Gemini
 * embedContent request.
 *
 * @param {object} body - OpenAI request body; only `input` is used.
 * @returns {object} Gemini embedContent payload.
 */
function transformEmbeddingRequest(body) {
  return {
    // Fix: keep the model in sync with the `embeddmodel` constant used to
    // build the URL (the original hard-coded "models/embedding-001" here,
    // which would silently diverge if the constant were changed).
    "model": `models/${embeddmodel}`,
    "content": {
      "parts": [
        {
          "text": body?.input
        }
      ]
    }
  };
}

/**
 * Convert a Gemini embedContent response into the OpenAI /v1/embeddings
 * response envelope.
 *
 * Fix: the OpenAI embeddings API returns `{object:"list", data:[...]}`;
 * the original returned a bare embedding object, which OpenAI client
 * libraries cannot parse.
 *
 * @param {object} geminiData - parsed Gemini embedContent response.
 * @returns {object} OpenAI-compatible embeddings response.
 */
function transformEmbeddingResponse(geminiData) {
  return {
    "object": "list",
    "data": [
      {
        "object": "embedding",
        "embedding": geminiData?.embedding?.values || [],
        "index": 0
      }
    ],
    "model": "embedding-001",
    // Gemini's embedContent reply carries no token counts; report zeros.
    "usage": { "prompt_tokens": 0, "total_tokens": 0 }
  };
}

/**
 * Convert an OpenAI chat/completions request body into a Gemini
 * generateContent request.
 *
 * Fixes over the original:
 * - `parts` is now an array (`[{text}]`) as required by the Gemini REST
 *   schema; the original sent a bare object.
 * - caller-owned message objects are no longer mutated in place.
 * - merged same-role messages are joined with '\n' between pieces instead
 *   of appending a trailing '\n' to each.
 * - a string `prompt` (legacy /v1/completions) is accepted when `messages`
 *   is absent.
 * - leftover debug console.log removed.
 *
 * @param {object} body - OpenAI request body.
 * @returns {object} Gemini generateContent payload.
 */
function transformCommonRequest(body) {
  // Map OpenAI roles onto Gemini's two-role scheme.
  const toGeminiRole = (role) => {
    if (role === 'system') return 'user';
    if (role === 'assistant') return 'model';
    return role;
  };

  let source = body?.messages;
  if (!Array.isArray(source) || source.length === 0) {
    // /v1/completions sends `prompt` instead of `messages`.
    const prompt = typeof body?.prompt === 'string' ? body.prompt : '';
    source = [{ role: 'user', content: prompt }];
  }

  // Gemini rejects consecutive messages with the same role; merge adjacent
  // same-role messages into one.
  const merged = [];
  for (const message of source) {
    const role = toGeminiRole(message.role);
    const last = merged[merged.length - 1];
    if (last && last.role === role) {
      last.content += '\n' + message.content;
    } else {
      merged.push({ role, content: message.content });
    }
  }

  return {
    contents: merged.map(({ role, content }) => ({
      role,
      // Gemini's REST schema declares `parts` as a repeated field -> array.
      parts: [{ text: content }],
    })),

    generationConfig: {
      temperature: body?.temperature,
      candidateCount: body?.n,
      topP: body?.top_p,
    }
  };
}

// Function to transform the response
/**
 * Convert a Gemini generateContent response into an OpenAI
 * chat.completion response.
 *
 * Fixes over the original:
 * - an EMPTY `candidates` array now also triggers the placeholder reply
 *   (the original only checked for a missing array).
 * - safety-blocked candidates can arrive without `content.parts`; the
 *   original crashed with a TypeError reading `parts[0].text`.
 * - Gemini omits `index` when it is 0; fall back to the array position so
 *   `choices[i].index` is never undefined.
 * - the input object is no longer mutated.
 *
 * @param {object} geminiData - parsed Gemini generateContent response.
 * @returns {object} OpenAI-compatible chat.completion payload.
 */
function transformCommonResponse(geminiData) {
  let candidates = geminiData.candidates;
  if (!candidates || candidates.length === 0) {
    // Substitute a placeholder so clients always receive one choice.
    candidates = [
      {
        content: {
          parts: [{ text: "Oops, Model respond nothing." }],
          role: "model",
        },
        finishReason: "STOP",
        index: 0,
      },
    ];
  }

  return {
    id: "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK", // static placeholder id
    object: 'chat.completion',
    created: Math.floor(Date.now() / 1000), // current Unix timestamp
    model: 'gpt-3.5-turbo', // static model name; clients expect an OpenAI id
    usage: {
      prompt_tokens: geminiData.usageMetadata?.promptTokenCount,
      completion_tokens: geminiData.usageMetadata?.candidatesTokenCount,
      total_tokens: geminiData.usageMetadata?.totalTokenCount,
    },
    choices: candidates.map((candidate, i) => ({
      message: {
        role: 'assistant',
        content: candidate?.content?.parts?.[0]?.text ?? '',
      },
      finish_reason: 'stop', // static finish reason
      index: candidate.index ?? i,
    })),
  };
}


/**
 * Replay an already-complete chat.completion as an OpenAI-style SSE stream.
 *
 * Fixes over the original:
 * - the content was split on "\n\n" WITHOUT keeping the separator, so a
 *   client concatenating the deltas received "ab" for "a\n\nb"; the split
 *   now keeps the "\n\n" at the end of each chunk.
 * - the final `data: [DONE]` event now ends with the blank line the SSE
 *   protocol requires.
 * - writes are awaited so backpressure is respected and the writer is
 *   closed even if a write fails.
 *
 * @param {object} response - OpenAI-shaped chat.completion object.
 * @param {WritableStream} writable - writable half of a TransformStream.
 * @returns {Promise<void>} resolves once the stream is closed.
 */
async function streamResponse(response, writable) {
  const encoder = new TextEncoder();
  const writer = writable.getWriter();

  const choice = response.choices[0];
  const content = choice.message.content ?? '';

  // Split on paragraph breaks, keeping the "\n\n" with the preceding chunk.
  const chunks = content.length > 0 ? content.split(/(?<=\n\n)/) : [''];

  try {
    for (let i = 0; i < chunks.length; i++) {
      const chunkResponse = {
        ...response,
        object: "chat.completion.chunk",
        choices: [{
          index: choice.index,
          delta: { ...choice.message, content: chunks[i] },
          finish_reason: i === chunks.length - 1 ? 'stop' : null, // 'stop' only on the last chunk
        }],
        usage: null,
      };
      await writer.write(encoder.encode(`data: ${JSON.stringify(chunkResponse)}\n\n`));
    }

    // Write the done signal (SSE events end with a blank line).
    await writer.write(encoder.encode(`data: [DONE]\n\n`));
  } finally {
    await writer.close();
  }
}



/**
 * Answer CORS preflight requests permissively.
 *
 * @param {Request} request - the OPTIONS request (unused).
 * @returns {Promise<Response>} a "pong" body with wildcard CORS headers.
 */
async function handleOPTIONS(request) {
  const corsHeaders = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': '*',
    'Access-Control-Allow-Headers': '*',
  };
  return new Response("pong", { headers: corsHeaders });
}
Loading