Skip to content

Instantly share code, notes, and snippets.

@qya
Created June 18, 2025 00:49
Show Gist options
  • Save qya/18dd7ebedd3deffe8adabd3964added1 to your computer and use it in GitHub Desktop.
Proxy server that converts Anthropic API requests to OpenAI format and sends them to OpenRouter, allowing Claude Code to be used with OpenRouter instead of the Anthropic API.
// Cloudflare Workers version of the Anthropic proxy
/**
 * Cloudflare Workers entry point.
 *
 * Routes:
 *   OPTIONS *            -> CORS preflight response
 *   GET     /            -> plain-text health check
 *   POST    /v1/messages -> Anthropic-style messages proxy (handleMessages)
 * Anything else          -> 404
 */
export default {
  async fetch(request, env, ctx) {
    const { method } = request;

    // Answer CORS preflight before any routing.
    if (method === 'OPTIONS') {
      const corsHeaders = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
        'Access-Control-Allow-Headers': 'Content-Type, Authorization, x-api-key',
      };
      return new Response(null, { headers: corsHeaders });
    }

    const { pathname } = new URL(request.url);

    // Simple health-check endpoint.
    if (method === 'GET' && pathname === '/') {
      return new Response('Hello World', {
        headers: { 'Content-Type': 'text/plain' },
      });
    }

    // Main proxy endpoint.
    if (method === 'POST' && pathname === '/v1/messages') {
      return handleMessages(request, env);
    }

    return new Response('Not Found', { status: 404 });
  },
};
/**
 * Handle POST /v1/messages: translate an Anthropic Messages API request
 * into an OpenAI chat-completions request, forward it to the configured
 * upstream (OpenRouter by default), and translate the reply back into
 * Anthropic's response format.
 *
 * @param {Request} request - Incoming Anthropic-style request.
 * @param {Object} env - Worker bindings: AUTH_TOKEN, ANTHROPIC_PROXY_BASE_URL,
 *   OPENROUTER_API_KEY, REASONING_MODEL, COMPLETION_MODEL.
 * @returns {Promise<Response>} JSON (non-streaming) or SSE (streaming) response.
 */
async function handleMessages(request, env) {
  try {
    // Configuration from environment variables.
    const AUTH_TOKEN = env.AUTH_TOKEN;
    const baseUrl = env.ANTHROPIC_PROXY_BASE_URL || 'https://openrouter.ai/api';
    // Only the default OpenRouter target gets the API key attached; a
    // custom base URL is assumed to handle its own authentication.
    const requiresApiKey = !env.ANTHROPIC_PROXY_BASE_URL;
    const key = requiresApiKey ? env.OPENROUTER_API_KEY : null;
    const model = 'deepseek/deepseek-chat-v3-0324:free';
    const models = {
      reasoning: env.REASONING_MODEL || model,
      completion: env.COMPLETION_MODEL || model,
    };

    // Authentication: accept x-api-key or an Authorization: Bearer token.
    let token = request.headers.get('x-api-key');
    if (!token) {
      const authHeader = request.headers.get('authorization');
      if (authHeader) {
        token = authHeader.replace('Bearer ', '');
      }
    }
    if (AUTH_TOKEN && token !== AUTH_TOKEN) {
      return new Response(JSON.stringify({ error: 'AUTH_TOKEN validation failed' }), {
        status: 401,
        headers: { 'Content-Type': 'application/json' },
      });
    }

    const payload = await request.json();

    // Flatten Anthropic content (a string or an array of blocks) into text.
    const normalizeContent = (content) => {
      if (typeof content === 'string') return content;
      if (Array.isArray(content)) {
        return content.map((item) => item.text).join(' ');
      }
      return null;
    };

    // Build the OpenAI-style messages array.
    const messages = [];

    // System prompt: Anthropic sends an array of text blocks.
    if (payload.system && Array.isArray(payload.system)) {
      payload.system.forEach((sysMsg) => {
        const normalized = normalizeContent(sysMsg.text || sysMsg.content);
        if (normalized) {
          messages.push({ role: 'system', content: normalized });
        }
      });
    }

    // Conversation messages, converting tool_use / tool_result blocks.
    if (payload.messages && Array.isArray(payload.messages)) {
      payload.messages.forEach((msg) => {
        // Fix: emit tool calls in the OpenAI shape
        // { id, type: 'function', function: { name, arguments } }, where
        // `arguments` is a JSON-encoded string. The previous shape nested
        // the id one level too deep and passed `input` as an object under
        // a nonexistent `parameters` key, which upstream APIs reject.
        const toolCalls = (Array.isArray(msg.content) ? msg.content : [])
          .filter((item) => item.type === 'tool_use')
          .map((toolCall) => ({
            id: toolCall.id,
            type: 'function',
            function: {
              name: toolCall.name,
              arguments: JSON.stringify(toolCall.input ?? {}),
            },
          }));
        const newMsg = { role: msg.role };
        const normalized = normalizeContent(msg.content);
        if (normalized) newMsg.content = normalized;
        if (toolCalls.length > 0) newMsg.tool_calls = toolCalls;
        if (newMsg.content || newMsg.tool_calls) messages.push(newMsg);
        // Tool results become OpenAI `tool` role messages.
        if (Array.isArray(msg.content)) {
          msg.content
            .filter((item) => item.type === 'tool_result')
            .forEach((toolResult) => {
              messages.push({
                role: 'tool',
                // tool_result content may itself be a string or block array.
                content: toolResult.text ?? normalizeContent(toolResult.content),
                tool_call_id: toolResult.tool_use_id,
              });
            });
        }
      });
    }

    // Recursively strip `format: 'uri'` from JSON schemas — some upstream
    // providers reject this format specifier in tool parameter schemas.
    const removeUriFormat = (schema) => {
      if (!schema || typeof schema !== 'object') return schema;
      if (schema.type === 'string' && schema.format === 'uri') {
        const { format, ...rest } = schema;
        return rest;
      }
      if (Array.isArray(schema)) {
        return schema.map((item) => removeUriFormat(item));
      }
      const result = {};
      for (const key in schema) {
        if (key === 'properties' && typeof schema[key] === 'object') {
          result[key] = {};
          for (const propKey in schema[key]) {
            result[key][propKey] = removeUriFormat(schema[key][propKey]);
          }
        } else if (key === 'items' && typeof schema[key] === 'object') {
          result[key] = removeUriFormat(schema[key]);
        } else if (key === 'additionalProperties' && typeof schema[key] === 'object') {
          result[key] = removeUriFormat(schema[key]);
        } else if (['anyOf', 'allOf', 'oneOf'].includes(key) && Array.isArray(schema[key])) {
          result[key] = schema[key].map((item) => removeUriFormat(item));
        } else {
          result[key] = removeUriFormat(schema[key]);
        }
      }
      return result;
    };

    // Convert Anthropic tool definitions to OpenAI function tools.
    // BatchTool is excluded (no OpenAI equivalent).
    const tools = (payload.tools || [])
      .filter((tool) => !['BatchTool'].includes(tool.name))
      .map((tool) => ({
        type: 'function',
        function: {
          name: tool.name,
          description: tool.description,
          parameters: removeUriFormat(tool.input_schema),
        },
      }));

    const openaiPayload = {
      // Requests with extended thinking enabled go to the reasoning model.
      model: payload.thinking ? models.reasoning : models.completion,
      messages,
      max_tokens: payload.max_tokens,
      temperature: payload.temperature !== undefined ? payload.temperature : 1,
      stream: payload.stream === true,
    };
    if (tools.length > 0) openaiPayload.tools = tools;

    const headers = { 'Content-Type': 'application/json' };
    if (requiresApiKey) {
      headers['Authorization'] = `Bearer ${key}`;
    }

    // Forward to the OpenAI-compatible chat-completions endpoint.
    const openaiResponse = await fetch(`${baseUrl}/v1/chat/completions`, {
      method: 'POST',
      headers,
      body: JSON.stringify(openaiPayload),
    });
    if (!openaiResponse.ok) {
      const errorDetails = await openaiResponse.text();
      return new Response(JSON.stringify({ error: errorDetails }), {
        status: openaiResponse.status,
        headers: { 'Content-Type': 'application/json' },
      });
    }

    // Streaming responses are relayed as Anthropic-style SSE.
    if (openaiPayload.stream) {
      return handleStreamingResponse(openaiResponse, openaiPayload);
    }

    // Non-streaming: translate the completed choice back to Anthropic format.
    const data = await openaiResponse.json();
    if (data.error) {
      return new Response(JSON.stringify({ error: data.error.message }), {
        status: 500,
        headers: { 'Content-Type': 'application/json' },
      });
    }
    const choice = data.choices[0];
    const openaiMessage = choice.message;

    // Map OpenAI finish_reason onto the Anthropic stop_reason vocabulary.
    const mapStopReason = (finishReason) => {
      switch (finishReason) {
        case 'tool_calls':
          return 'tool_use';
        case 'length':
          return 'max_tokens';
        case 'stop':
        default:
          return 'end_turn';
      }
    };
    const stopReason = mapStopReason(choice.finish_reason);
    const toolCalls = openaiMessage.tool_calls || [];
    const messageId = data.id
      ? data.id.replace('chatcmpl', 'msg')
      : 'msg_' + Math.random().toString(36).slice(2, 26);

    const anthropicResponse = {
      content: [
        // Fix: omit the text block entirely when the model returned no text
        // (e.g. a pure tool-call turn); Anthropic rejects `text: null`.
        ...(openaiMessage.content
          ? [{ type: 'text', text: openaiMessage.content }]
          : []),
        ...toolCalls.map((toolCall) => ({
          type: 'tool_use',
          id: toolCall.id,
          name: toolCall.function.name,
          input: JSON.parse(toolCall.function.arguments || '{}'),
        })),
      ],
      id: messageId,
      model: openaiPayload.model,
      role: openaiMessage.role,
      stop_reason: stopReason,
      stop_sequence: null,
      type: 'message',
      usage: {
        // Prefer real usage numbers; fall back to a rough word count.
        input_tokens: data.usage
          ? data.usage.prompt_tokens
          : messages.reduce((acc, msg) => acc + (msg.content || '').split(' ').length, 0),
        output_tokens: data.usage
          ? data.usage.completion_tokens
          : (openaiMessage.content || '').split(' ').length,
      },
    };
    return new Response(JSON.stringify(anthropicResponse), {
      headers: { 'Content-Type': 'application/json' },
    });
  } catch (err) {
    console.error(err);
    return new Response(JSON.stringify({ error: err.message }), {
      status: 500,
      headers: { 'Content-Type': 'application/json' },
    });
  }
}
/**
 * Relay an OpenAI-style SSE stream as Anthropic Messages API SSE events.
 *
 * Fixes over the previous version:
 *  - SSE lines split across network chunks are no longer corrupted: the
 *    decoder runs in streaming mode and a partial trailing line is carried
 *    over to the next chunk instead of being parsed (and dropped) early.
 *  - The stream is finalized and closed even when the upstream body ends
 *    without a terminating `[DONE]` sentinel, so clients no longer hang.
 *
 * @param {Response} openaiResponse - Upstream streaming response.
 * @param {Object} openaiPayload - Payload sent upstream (used for model name).
 * @returns {Response} text/event-stream response in Anthropic event format.
 */
async function handleStreamingResponse(openaiResponse, openaiPayload) {
  const { readable, writable } = new TransformStream();
  const writer = writable.getWriter();
  const encoder = new TextEncoder();

  // Serialize one SSE event frame.
  const sendSSE = async (event, data) => {
    const sseMessage = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`;
    await writer.write(encoder.encode(sseMessage));
  };

  // Pump the upstream stream in the background; the Response below is
  // returned to the client immediately.
  (async () => {
    // Accumulated state across chunks.
    let accumulatedContent = '';
    let accumulatedReasoning = '';
    let usage = null;
    let textBlockStarted = false;
    let encounteredToolCall = false;
    const toolCallAccumulators = {}; // tool-call index -> argument JSON seen so far
    let finished = false;

    // Emit the closing events (content_block_stop / message_delta /
    // message_stop) and close the stream, exactly once.
    const finalize = async () => {
      if (finished) return;
      finished = true;
      if (encounteredToolCall) {
        for (const idx in toolCallAccumulators) {
          await sendSSE('content_block_stop', {
            type: 'content_block_stop',
            index: parseInt(idx, 10),
          });
        }
      } else if (textBlockStarted) {
        await sendSSE('content_block_stop', {
          type: 'content_block_stop',
          index: 0,
        });
      }
      await sendSSE('message_delta', {
        type: 'message_delta',
        delta: {
          stop_reason: encounteredToolCall ? 'tool_use' : 'end_turn',
          stop_sequence: null,
        },
        // Fall back to a rough word count when the provider omits usage.
        usage: usage
          ? { output_tokens: usage.completion_tokens }
          : {
              output_tokens:
                accumulatedContent.split(' ').length +
                accumulatedReasoning.split(' ').length,
            },
      });
      await sendSSE('message_stop', { type: 'message_stop' });
      await writer.close();
    };

    try {
      const messageId = 'msg_' + Math.random().toString(36).slice(2, 26);
      // Announce the message before any content blocks.
      await sendSSE('message_start', {
        type: 'message_start',
        message: {
          id: messageId,
          type: 'message',
          role: 'assistant',
          model: openaiPayload.model,
          content: [],
          stop_reason: null,
          stop_sequence: null,
          usage: { input_tokens: 0, output_tokens: 0 },
        },
      });
      await sendSSE('ping', { type: 'ping' });

      const reader = openaiResponse.body.getReader();
      const decoder = new TextDecoder();
      let buffer = ''; // carries a partial line across chunk boundaries
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split('\n');
        buffer = lines.pop(); // last element may be an incomplete line
        for (const line of lines) {
          const trimmed = line.trim();
          if (trimmed === '' || !trimmed.startsWith('data:')) continue;
          const dataStr = trimmed.replace(/^data:\s*/, '');
          if (dataStr === '[DONE]') {
            await finalize();
            return;
          }
          try {
            const parsed = JSON.parse(dataStr);
            if (parsed.error) {
              throw new Error(parsed.error.message);
            }
            if (parsed.usage) {
              usage = parsed.usage;
            }
            // Some providers emit usage-only chunks with no choices.
            const delta = parsed.choices?.[0]?.delta;
            if (delta && delta.tool_calls) {
              for (const toolCall of delta.tool_calls) {
                encounteredToolCall = true;
                const idx = toolCall.index;
                if (toolCallAccumulators[idx] === undefined) {
                  // First fragment for this tool call: open its block.
                  toolCallAccumulators[idx] = '';
                  await sendSSE('content_block_start', {
                    type: 'content_block_start',
                    index: idx,
                    content_block: {
                      type: 'tool_use',
                      id: toolCall.id,
                      name: toolCall.function.name,
                      input: {},
                    },
                  });
                }
                // Providers may resend the full argument string; forward
                // only the newly appended suffix as the delta.
                const newArgs = toolCall.function.arguments || '';
                const oldArgs = toolCallAccumulators[idx];
                if (newArgs.length > oldArgs.length) {
                  const deltaText = newArgs.substring(oldArgs.length);
                  await sendSSE('content_block_delta', {
                    type: 'content_block_delta',
                    index: idx,
                    delta: {
                      type: 'input_json_delta',
                      partial_json: deltaText,
                    },
                  });
                  toolCallAccumulators[idx] = newArgs;
                }
              }
            } else if (delta && delta.content) {
              if (!textBlockStarted) {
                textBlockStarted = true;
                await sendSSE('content_block_start', {
                  type: 'content_block_start',
                  index: 0,
                  content_block: { type: 'text', text: '' },
                });
              }
              accumulatedContent += delta.content;
              await sendSSE('content_block_delta', {
                type: 'content_block_delta',
                index: 0,
                delta: { type: 'text_delta', text: delta.content },
              });
            } else if (delta && delta.reasoning) {
              if (!textBlockStarted) {
                textBlockStarted = true;
                await sendSSE('content_block_start', {
                  type: 'content_block_start',
                  index: 0,
                  content_block: { type: 'text', text: '' },
                });
              }
              accumulatedReasoning += delta.reasoning;
              await sendSSE('content_block_delta', {
                type: 'content_block_delta',
                index: 0,
                delta: { type: 'thinking_delta', thinking: delta.reasoning },
              });
            }
          } catch (parseError) {
            // Malformed chunks are logged and skipped, not fatal.
            console.error('Error parsing JSON:', parseError);
          }
        }
      }
      // Upstream ended without [DONE]: still close out the message cleanly.
      await finalize();
    } catch (error) {
      console.error('Streaming error:', error);
      // Best effort: the writer may already be closed.
      try {
        await writer.close();
      } catch {}
    }
  })();

  return new Response(readable, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
      'Access-Control-Allow-Origin': '*',
    },
  });
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment