anth_proxy.ts: OpenAI compatible interface for Anthropic Client.
/**
 * OpenAI compatible interface for Anthropic Client.
 *
 * Usage:
 *   ANTH_PROXY_API_KEY=XXX bun run anth_proxy.ts https://openrouter.ai/api/v1/chat/completions qwen/qwen3-coder:free
 *   ANTHROPIC_BASE_URL=http://localhost:3000 claude -p 'write a sample code in main.ts'
 *
 * LICENSE: MIT
 * Inspired by https://github.com/kiyo-e/claude-code-proxy
 */
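// Example (illustrative): once the proxy is listening on port 3000, an
// Anthropic-style request can be sent to it directly. The body below is a
// hypothetical sketch, not taken from the original gist; note that the proxy
// ignores the incoming model name and substitutes the MODEL CLI argument
// before forwarding to the OpenAI-compatible endpoint:
//
//   curl -s http://localhost:3000/v1/messages \
//     -H 'Content-Type: application/json' \
//     -d '{"model":"claude-sonnet","max_tokens":256,"messages":[{"role":"user","content":"Hello"}]}'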
import * as http from "http";
interface AnthropicMessage {
  role: string;
  content: string | ContentBlock[];
}

interface ContentBlock {
  type: string;
  text?: string;
  content?: any;
  source?: {
    type: string;
    media_type?: string;
    url?: string;
  };
  id?: string;
  name?: string;
  input?: any;
  tool_use_id?: string;
}

interface AnthropicRequest {
  model: string;
  messages: AnthropicMessage[];
  system?: string;
  temperature?: number;
  max_tokens?: number;
  cache_control?: any;
  tools?: any[];
}

interface OpenAIRequest {
  model: string;
  messages: { role: string; content: string }[];
  temperature?: number;
  max_tokens?: number;
  stream?: boolean;
  tools?: any[];
  tool_choice?: string | any;
}

interface OpenAIChoice {
  message: { content: string };
  finish_reason: string | null;
}

interface OpenAIUsage {
  prompt_tokens?: number;
  completion_tokens?: number;
}

interface OpenAIResponse {
  choices: OpenAIChoice[];
  usage?: OpenAIUsage;
}

interface AnthropicResponse {
  id: string;
  type: string;
  role: string;
  model: string;
  content: ContentBlock[];
  stop_reason: string;
  stop_sequence: string | null;
  usage: {
    input_tokens: number;
    output_tokens: number;
    cache_creation_input_tokens: number;
    cache_read_input_tokens: number;
    service_tier: string;
  };
}

interface AnthropicStreamResponse extends AnthropicResponse {
  streamed_data: StreamedData[];
}

interface StreamedData {
  nonce?: string;
  type: string;
  message?: AnthropicStreamResponse;
  content_block?: ContentBlock;
  index?: number;
  delta?: {
    type: string;
    text?: string;
    stop_reason?: string;
    stop_sequence?: string | null;
    partial_json?: string;
  };
  usage?: { output_tokens: number };
  error?: { type: string; message: string };
}
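// Small shared helpers. Note: Math.random().toString(36) yields at most about
// a dozen characters after the "0." prefix, so generateId may return fewer
// characters than requested; the ids are only used for logging and message ids.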
const generateId = (length = 15) => Math.random().toString(36).substring(2, length + 2);

const log = (requestId: string, message: string) => console.log(`[${requestId}] ${message}`);

const logError = (requestId: string, message: string, error?: any) => {
  console.error(`[${requestId}] ${message}`, error || '');
};

const sendError = (res: http.ServerResponse, status: number, error: string, details?: string) => {
  res.writeHead(status, { "Content-Type": "application/json" });
  res.end(JSON.stringify({ error, ...(details && { details }) }));
};

const createUsage = (input = 0, output = 0) => ({
  input_tokens: input,
  output_tokens: output,
  cache_creation_input_tokens: 0,
  cache_read_input_tokens: 0,
  service_tier: "standard"
});

const createStreamData = (type: string, data: any = {}, nonce = generateId(4)) => ({
  nonce,
  type,
  ...data
});
const parseArgs = () => {
  const args = process.argv.slice(2);
  let targetUrl: string | undefined;
  let model: string | undefined;
  for (let i = 0; i < args.length; i++) {
    if (args[i] === '--base-url' && i + 1 < args.length) {
      targetUrl = args[++i];
    } else if (!targetUrl) {
      targetUrl = args[i];
    } else if (!model) {
      model = args[i];
    }
  }
  const apiKey = process.env.ANTH_PROXY_API_KEY;
  if (!targetUrl || !model || !apiKey) {
    console.error("Error: Required argument or environment variable is missing");
    console.error("Usage: ANTH_PROXY_API_KEY=<API_KEY> bun run anth_proxy.ts [--base-url] <TARGET_URL> <MODEL>");
    console.error("Examples:");
    console.error("  bun run anth_proxy.ts https://api.openai.com/v1/chat/completions gpt-4");
    console.error("  bun run anth_proxy.ts --base-url https://openrouter.ai/api/v1/chat/completions qwen/qwen3-coder:free");
    process.exit(1);
  }
  try {
    new URL(targetUrl);
  } catch {
    console.error(`Error: Invalid target URL format: ${targetUrl}`);
    console.error("Please provide a valid URL (e.g., https://api.openai.com/v1/chat/completions)");
    process.exit(1);
  }
  return { targetUrl, model, apiKey };
};
const { targetUrl, model, apiKey } = parseArgs();
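// HTTP server: accepts Anthropic-style POST /v1/messages requests, forwards
// them to the OpenAI-compatible target, and replies with a single
// Anthropic-shaped JSON body. The SSE events a streaming implementation would
// emit are collected in `streamed_data` rather than streamed back to the client.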
http.createServer(async (req, res) => {
  const startTime = Date.now();
  const requestId = generateId();
  log(requestId, `${req.method} ${req.url} - ${new Date().toISOString()}`);
  const url = new URL(req.url || '', `http://${req.headers.host}`);
  if (req.method !== "POST" || url.pathname !== "/v1/messages") {
    log(requestId, "404 Not found");
    res.writeHead(404);
    return res.end("Not found");
  }
  try {
    const buffers: Buffer[] = [];
    for await (const chunk of req) buffers.push(chunk);
    const body = JSON.parse(Buffer.concat(buffers).toString());
    if (!body?.messages || !Array.isArray(body.messages)) {
      logError(requestId, "Invalid request body or missing messages array");
      return sendError(res, 400, "Invalid request body", "messages array is required");
    }
    const cleanedBody = removeCacheControl(body) as AnthropicRequest;
    const openaiPayload = transformRequestBody(cleanedBody, model);
    log(requestId, `Forwarding to OpenAI: ${targetUrl}`);
    if (openaiPayload.tools) log(requestId, `Request includes ${openaiPayload.tools.length} tools`);
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), 30000);
    const response = await fetch(targetUrl, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${apiKey}`,
        "Content-Type": "application/json"
      },
      body: JSON.stringify(openaiPayload),
      signal: controller.signal
    });
    clearTimeout(timeoutId);
    log(requestId, `OpenAI response status: ${response.status}`);
    if (!response.ok) {
      // Pass the upstream error body through as a string; it is not
      // guaranteed to be JSON, so it must not be parsed blindly.
      const errorBody = await response.text();
      logError(requestId, "OpenAI API error", errorBody);
      return sendError(res, response.status, "Upstream API error", errorBody);
    }
    // Consume the upstream stream before writing the response head, so the
    // catch block can still send a proper error status if streaming fails.
    const anthropicResponse = await handleStreamingResponse(response, model, requestId);
    res.writeHead(200, {
      "Content-Type": "application/json",
      "Cache-Control": "no-cache",
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Headers": "Cache-Control"
    });
    log(requestId, `Response sent successfully (${Date.now() - startTime}ms)`);
    res.end(JSON.stringify(anthropicResponse));
  } catch (error) {
    logError(requestId, "Request processing error", error);
    const [statusCode, message] = getErrorResponse(error);
    sendError(res, statusCode, message, error instanceof Error ? error.message : String(error));
  }
}).listen(3000, () => {
  console.log("Claude→OpenAI proxy listening on http://localhost:3000/v1/messages");
});
const getErrorResponse = (error: unknown): [number, string] => {
  if (error instanceof TypeError && error.message.includes('fetch')) {
    return [503, "Network error: Unable to connect to target API"];
  }
  if (error instanceof Error && error.name === 'AbortError') {
    return [504, "Request timeout: Target API did not respond within 30 seconds"];
  }
  if (error instanceof Error) {
    return [500, `Request failed: ${error.message}`];
  }
  return [500, "Internal server error"];
};
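// Maps a complete (non-streaming) OpenAI response to the Anthropic shape.
// Note: nothing in this file currently calls it; the request path always goes
// through handleStreamingResponse below.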
function transformCompleteResponse(openaiResponse: OpenAIResponse, model: string): AnthropicResponse {
  const choice = openaiResponse.choices?.[0];
  const content = choice?.message?.content || "";
  const usage = openaiResponse.usage || {};
  return {
    id: `msg_${Date.now()}_${generateId()}`,
    type: "message",
    role: "assistant",
    model,
    content: [{ type: "text", text: content }],
    stop_reason: mapFinishReasonToStopReason(choice?.finish_reason),
    stop_sequence: null,
    usage: createUsage(usage.prompt_tokens, usage.completion_tokens)
  };
}

function mapFinishReasonToStopReason(finishReason: string | null | undefined): string {
  const mapping: Record<string, string> = {
    stop: "end_turn",
    length: "max_tokens",
    content_filter: "content_filter",
    tool_calls: "tool_use",
    function_call: "tool_use"
  };
  return mapping[finishReason || ""] || "end_turn";
}
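// transformRequestBody converts an Anthropic request into an OpenAI chat
// completion request: the optional top-level `system` string becomes a leading
// system message, each Anthropic message is flattened to plain text via
// processContent, and streaming is always enabled. For example (hypothetical
// values), { system: "be brief", messages: [{ role: "user", content: "hi" }] }
// becomes { model: <MODEL arg>, messages: [{ role: "system", content: "be
// brief" }, { role: "user", content: "hi" }], stream: true, ... }.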
function transformRequestBody(anthropicBody: AnthropicRequest, model: string): OpenAIRequest {
  const messages: { role: string; content: string }[] = [];
  if (anthropicBody.system) {
    const systemContent = processContent(anthropicBody.system);
    if (systemContent) messages.push({ role: "system", content: systemContent });
  }
  anthropicBody.messages?.forEach(msg => {
    if (msg?.role && msg?.content) {
      const content = processContent(msg.content);
      if (content) messages.push({ role: msg.role, content });
    }
  });
  const request: OpenAIRequest = {
    model,
    messages,
    temperature: anthropicBody.temperature ?? 0.7,
    max_tokens: anthropicBody.max_tokens ?? 1024,
    stream: true,
  };
  if (anthropicBody.tools?.length) {
    const cleanedTools = validateAndCleanTools(anthropicBody.tools);
    if (cleanedTools.length) {
      request.tools = cleanedTools;
      request.tool_choice = "auto";
    }
  }
  return request;
}
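// processContent flattens Anthropic content blocks into a single string, since
// the OpenAI payload built above only carries plain text. For example,
// [{ type: "text", text: "run it" }, { type: "tool_result", tool_use_id: "t1",
// content: "ok" }] becomes "run it\n[Tool result: \"ok\"]".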
function processContent(content: string | ContentBlock[] | ContentBlock): string | null {
  if (typeof content === 'string') return content;
  const items = Array.isArray(content) ? content : [content];
  const textParts: string[] = [];
  for (const item of items) {
    if (!item?.type) continue;
    switch (item.type) {
      case 'text':
        if (item.text) textParts.push(item.text);
        break;
      case 'image': {
        const imageDesc = item.source?.type === 'base64'
          ? `[Image: ${item.source.media_type || 'image'}]`
          : item.source?.type === 'url'
            ? `[Image URL: ${item.source.url}]`
            : '[Image]';
        textParts.push(imageDesc);
        break;
      }
      case 'tool_use':
        if (item.id && item.name) {
          const input = item.input && Object.keys(item.input).length
            ? JSON.stringify(item.input, null, 0)
            : '';
          textParts.push(`[Tool use: ${item.name}(${input})]`);
        }
        break;
      case 'tool_result':
        if (item.tool_use_id) {
          const result = item.content ? JSON.stringify(item.content) : 'null';
          textParts.push(`[Tool result: ${result}]`);
        }
        break;
      default:
        textParts.push(`[Unsupported content: ${item.type}]`);
    }
  }
  return textParts.length ? textParts.join('\n') : null;
}
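// validateAndCleanTools accepts tools in either the OpenAI shape
// ({ type: "function", function: { name, parameters } }) or the Anthropic
// shape ({ name, description, input_schema }) and normalizes both to the
// OpenAI form, dropping anything unrecognized. E.g. (hypothetical tool)
// { name: "get_weather", input_schema: { type: "object" } } becomes
// { type: "function", function: { name: "get_weather", parameters: { type: "object" } } }.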
function validateAndCleanTools(tools: any[]): any[] {
  return tools.map((tool) => {
    if (!tool || typeof tool !== 'object') return null;
    if (tool.type === 'function' && tool.function?.name) {
      return {
        type: 'function',
        function: {
          name: tool.function.name,
          ...(tool.function.description && { description: tool.function.description }),
          ...(tool.function.parameters && { parameters: tool.function.parameters }),
          ...(tool.function.strict !== undefined && { strict: tool.function.strict })
        }
      };
    }
    if (tool.name) {
      return {
        type: 'function',
        function: {
          name: tool.name,
          ...(tool.description && { description: tool.description }),
          ...(tool.input_schema && { parameters: tool.input_schema }),
          ...(tool.strict !== undefined && { strict: tool.strict })
        }
      };
    }
    return null;
  }).filter(Boolean);
}
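// removeCacheControl recursively strips Anthropic-specific `cache_control`
// keys, which OpenAI-compatible endpoints may reject as unknown fields.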
function removeCacheControl(obj: unknown): unknown {
  if (Array.isArray(obj)) return obj.map(removeCacheControl);
  if (obj && typeof obj === 'object') {
    const newObj: Record<string, unknown> = {};
    for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
      if (key !== 'cache_control') newObj[key] = removeCacheControl(value);
    }
    return newObj;
  }
  return obj;
}
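// handleStreamingResponse consumes the upstream OpenAI SSE stream and
// reconstructs a complete Anthropic message. Each upstream line looks like
// (hypothetical payload):
//
//   data: {"choices":[{"delta":{"content":"Hel"},"finish_reason":null}]}
//
// Text deltas are accumulated into one text block, tool_call deltas are
// assembled via processToolCalls, and the Anthropic SSE events a real
// streaming implementation would emit are recorded in streamed_data instead
// of being written to the client incrementally.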
async function handleStreamingResponse(response: Response, model: string, requestId: string): Promise<AnthropicStreamResponse> {
  const reader = response.body?.getReader();
  if (!reader) throw new Error("Response body is not readable");
  const messageId = `msg_${Date.now()}_${generateId()}`;
  let currentText = "";
  let outputTokens = 0;
  let finishReason: string | null = null;
  let toolCalls: ContentBlock[] = [];
  let encounteredToolCall = false;
  let toolCallArguments: Record<string, string> = {};
  const streamedData: StreamedData[] = [];
  const addStreamData = (type: string, data: any = {}) => {
    streamedData.push(createStreamData(type, data));
  };
  try {
    addStreamData("message_start", {
      message: {
        id: messageId,
        type: "message",
        role: "assistant",
        model,
        content: [],
        stop_reason: null,
        stop_sequence: null,
        usage: createUsage(0, 1),
        streamed_data: []
      }
    });
    addStreamData("content_block_start", {
      index: 0,
      content_block: { type: "text", text: "" }
    });
    addStreamData("ping");
    // A single decoder with { stream: true } handles multi-byte characters
    // split across chunks; `buffer` carries partial SSE lines to the next read.
    const decoder = new TextDecoder();
    let buffer = "";
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? "";
      for (const line of lines) {
        if (!line.startsWith('data: ')) continue;
        const data = line.slice(6);
        if (data === '[DONE]') {
          // Keep an already-detected stop reason (e.g. tool_use) rather than
          // overwriting it at end of stream.
          finishReason = finishReason || "end_turn";
          break;
        }
        try {
          const json = JSON.parse(data);
          const choice = json?.choices?.[0];
          if (!choice?.delta) continue;
          const delta = choice.delta;
          if (delta.content) {
            currentText += delta.content;
            outputTokens++; // rough count: one "token" per delta chunk
            addStreamData("content_block_delta", {
              index: 0,
              delta: { type: "text_delta", text: delta.content }
            });
          }
          if (delta.tool_calls) {
            encounteredToolCall = true;
            processToolCalls(delta.tool_calls, toolCalls, toolCallArguments, streamedData, requestId);
          }
          if (choice.finish_reason) {
            finishReason = mapFinishReasonToStopReason(choice.finish_reason);
          } else if (encounteredToolCall && toolCalls.length > 0) {
            finishReason = "tool_use";
          }
        } catch (e) {
          console.warn(`[${requestId}] Failed to parse streaming chunk:`, data);
        }
      }
    }
  } catch (error) {
    logError(requestId, "Streaming error", error);
    addStreamData("error", {
      error: { type: "server_error", message: "Streaming failed" }
    });
    throw error;
  } finally {
    reader.releaseLock();
  }
  finalizeToolCalls(toolCalls, toolCallArguments, requestId);
  if (encounteredToolCall && toolCalls.length > 0) {
    toolCalls.forEach((_, i) => addStreamData("content_block_stop", { index: i }));
  } else if (currentText.trim()) {
    addStreamData("content_block_stop", { index: 0 });
  }
  addStreamData("message_delta", {
    delta: { type: "message_delta", stop_reason: finishReason, stop_sequence: null },
    usage: { output_tokens: outputTokens }
  });
  addStreamData("message_stop");
  const content: ContentBlock[] = [];
  if (currentText.trim()) content.push({ type: "text", text: currentText });
  if (toolCalls.length > 0) content.push(...toolCalls);
  if (content.length === 0) content.push({ type: "text", text: "" });
  return {
    id: messageId,
    type: "message",
    role: "assistant",
    model,
    content,
    stop_reason: finishReason || "end_turn",
    stop_sequence: null,
    usage: createUsage(0, outputTokens),
    streamed_data: streamedData
  };
}
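// processToolCalls incrementally assembles tool calls from OpenAI deltas.
// Upstream, a call typically arrives split across chunks, e.g. (hypothetical):
//   { index: 0, id: "call_1", function: { name: "get_weather", arguments: "" } }
//   { index: 0, function: { arguments: "{\"city\":" } }
//   { index: 0, function: { arguments: "\"Tokyo\"}" } }
// Only the first chunk carries `id`; later chunks are matched by `index`.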
function processToolCalls(toolCalls: any[], existingToolCalls: ContentBlock[], toolCallArguments: Record<string, string>, streamedData: StreamedData[], requestId: string) {
  for (const toolCall of toolCalls) {
    // `index` is 0 for the first tool call, so test for null/undefined
    // explicitly rather than for falsiness.
    if (toolCall.index == null) continue;
    // Match by id when present; otherwise fall back to positional lookup,
    // since OpenAI only includes `id` on a tool call's first delta.
    let existingToolCall = toolCall.id
      ? existingToolCalls.find(tc => tc.id === toolCall.id)
      : existingToolCalls[toolCall.index];
    if (!existingToolCall && toolCall.id) {
      const toolUseBlock: ContentBlock = {
        type: "tool_use",
        id: toolCall.id,
        name: toolCall.function?.name || "",
        input: {}
      };
      streamedData.push(createStreamData("content_block_start", {
        index: toolCall.index,
        content_block: toolUseBlock
      }));
      existingToolCalls.push(toolUseBlock);
      existingToolCall = toolUseBlock;
      toolCallArguments[toolCall.id] = "";
      log(requestId, `Started new tool call: ${toolCall.function?.name} (ID: ${toolCall.id})`);
    }
    if (!existingToolCall) continue;
    if (toolCall.function?.name) {
      existingToolCall.name = toolCall.function.name;
    }
    if (toolCall.function?.arguments && existingToolCall.id) {
      const args = toolCall.function.arguments;
      toolCallArguments[existingToolCall.id] += args;
      streamedData.push(createStreamData("content_block_delta", {
        index: toolCall.index,
        delta: { type: "input_json_delta", partial_json: args }
      }));
      try {
        // Opportunistically parse whenever the accumulated JSON is complete.
        existingToolCall.input = JSON.parse(toolCallArguments[existingToolCall.id]);
      } catch {
        // Arguments are still partial; finalizeToolCalls will parse them.
      }
    }
  }
}
function finalizeToolCalls(toolCalls: ContentBlock[], toolCallArguments: Record<string, string>, requestId: string) {
  for (const toolCall of toolCalls) {
    if (!toolCall.id) continue;
    const args = toolCallArguments[toolCall.id];
    if (args) {
      try {
        toolCall.input = JSON.parse(args);
        log(requestId, `Final parsing successful for ${toolCall.name}`);
      } catch (e) {
        console.warn(`[${requestId}] Final parsing failed for ${toolCall.name}`);
        toolCall.input = {};
      }
    } else {
      toolCall.input = {};
    }
  }
}