@therealkenc
Last active January 1, 2025 08:28
Google OAI compatibility endpoint test
#!/usr/bin/env -S npm run tsn -T
import fetch from 'node-fetch';
import { RequestInit, Response, RequestInfo } from 'node-fetch';
import 'openai/shims/web'; // <--- Wait, wut? Why "shims/web"??? Why not "shims/node"???
import OpenAI from 'openai';
import { RunnableToolFunction } from 'openai/lib/RunnableFunction';
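// Provider settings: Gemini's OpenAI-compatibility endpoint, plus a local Ollama server for comparison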
const config = {
  gemini: {
    apiKey: process.env['GEMINI_API_KEY']!,
    baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
    model: 'gemini-2.0-flash-exp',
  },
  ollama: {
    apiKey: 'ollama',
    baseURL: 'http://localhost:11434/v1',
    model: 'llama3.1',
  },
} as const;
// Pick the provider under test ('gemini' here; switch to 'ollama' to hit a local server)
const provider = 'gemini';
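// Fetch wrapper that logs every outgoing request body and incoming response body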
const customFetch = async (url: RequestInfo, init?: RequestInit): Promise<Response> => {
  if (init?.method === 'POST') {
    const body = JSON.parse(init.body as string);
    console.log('\nOUTGOING REQUEST:', init.method, url);
    console.log('Request body:', JSON.stringify(body, null, 2));
    // Re-serialize the (unmodified) body before sending
    init.body = JSON.stringify(body);
  }
  const response = await fetch(url, init);
  // Clone the response so we can read the body without consuming the original stream
  const clone = response.clone();
  const body = await clone.text();
  try {
    const jsonBody = JSON.parse(body);
    console.log('\nINCOMING RESPONSE:', response.status);
    console.log('Response body:', JSON.stringify(jsonBody, null, 2));
  } catch (e) {
    console.log('\nINCOMING RESPONSE (raw):', response.status);
    console.log('Response body:', body);
  }
  return response;
};
const openai = new OpenAI({
  apiKey: config[provider].apiKey,
  baseURL: config[provider].baseURL,
  fetch: customFetch,
});
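// Tool implementation: resolve the current local time for an IANA timezone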
interface FunctionArgs {
  timezone: string;
}
function getTime(args: FunctionArgs) {
  const time = new Date().toLocaleString('en-US', { timeZone: args.timezone });
  return {
    time: time,
  };
}
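// Tool definition handed to the SDK's runTools helper; 'function' and 'parse' let the runner invoke getTime automatically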
const tools: RunnableToolFunction<any>[] = [
  {
    type: 'function',
    function: {
      name: 'time',
      description: 'function to return the current time for a given timezone',
      parameters: {
        type: 'object',
        properties: {
          timezone: {
            type: 'string',
            description: 'The IANA timezone identifier, eg: America/New_York',
          },
        },
      },
      function: getTime,
      parse: JSON.parse,
    },
  },
];
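// Run a streaming chat completion that can call the time() tool, logging runner events along the way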
async function main() {
  console.log('Starting main function...');
  const runner = await openai.beta.chat.completions
    .runTools({
      model: config[provider].model,
      stream: true,
      tools,
      messages: [
        {
          role: 'system',
          content: 'Use the time() function when asked the time',
        },
        {
          role: 'user',
          content: 'What time is it in New York?',
        },
      ],
    })
    .on('message', (msg) => {
      console.log('Received message:', JSON.stringify(msg, null, 2));
    })
    .on('functionCall', (functionCall) => console.log('functionCall', functionCall))
    .on('functionCallResult', (functionCallResult) => console.log('functionCallResult', functionCallResult))
    .on('content', (diff) => process.stdout.write(diff));
  await runner.finalChatCompletion();
}
main();