Skip to content

Instantly share code, notes, and snippets.

@andyjessop
Last active February 21, 2025 13:42
Show Gist options
  • Save andyjessop/5312105fac133ed13e8bc7476a277ad4 to your computer and use it in GitHub Desktop.
Workflow + structured outputs
import { generateObject } from 'ai';
import { WorkflowEntrypoint, WorkflowEvent, WorkflowStep } from 'cloudflare:workers';
// FIX: the provider factory is exported as `createWorkersAI` (capital AI); the
// original `createWorkersAi` import never matched the call site below.
import { createWorkersAI } from 'workers-ai-provider';
import { z } from 'zod';
// Bindings available to this Worker (declared in wrangler config).
type Env = {
// Workers AI binding used to run the LLM inference calls.
AI: Ai;
// Binding to the TaskWorkflow defined below, used by the fetch handler.
MY_WORKFLOW: Workflow;
};
// Payload passed when a workflow instance is created.
type Params = {
prompt: string;
};
// Structured-output schema for step 1: the orchestrator returns a list of subtasks.
const orchestratorSchema = z.object({
tasks: z.array(z.string()),
});
// Structured-output schema for step 2: each worker returns a single text response.
const workerOutputSchema = z.object({
response: z.string(),
});
// Structured-output schema for step 3: the aggregator returns one synthesised result.
const aggregatorSchema = z.object({
finalResult: z.string(),
});
/**
 * Orchestrator–workers workflow:
 *   1. A large model breaks the user's prompt into subtasks.
 *   2. A small model handles every subtask in parallel.
 *   3. The large model synthesises the worker outputs into one final result.
 *
 * Each phase is wrapped in `step.do(...)` so Cloudflare Workflows can persist
 * and retry it independently.
 */
export class TaskWorkflow extends WorkflowEntrypoint<Env, Params> {
  async run(event: WorkflowEvent<Params>, step: WorkflowStep) {
    const prompt = event.payload.prompt;

    // FIX: the original built models via an undefined `openai(...)` helper.
    // Models must come from the Workers AI provider created from the binding.
    const workersai = createWorkersAI({
      binding: this.env.AI,
    });
    const bigModel = workersai('@cf/meta/llama-3.3-70b-instruct-fp8-fast');
    const smallModel = workersai('@cf/meta/llama-3.1-8b-instruct-fp8');

    // --- Step 1: Orchestrator Generates Subtasks ---
    const orchestratorPrompt = `Given the following complex coding task:\n\n${prompt}\n\nPlease break it down into a list of subtasks needed to complete the task. Return your answer as a JSON object in the format { "tasks": ["Task 1", "Task 2", ...] }`;
    const orchestratorResult = await step.do('orchestrator-step', async () => {
      // Call the big model; `generateObject` validates the output against the schema.
      const result = await generateObject({
        model: bigModel,
        schema: orchestratorSchema,
        prompt: orchestratorPrompt,
      });
      return result.object;
    });

    // --- Step 2: Workers Execute Each Subtask in Parallel ---
    // NOTE(review): all subtasks run inside one step, so a single failure
    // retries the whole batch; per-task `step.do` calls would give finer
    // retry granularity at the cost of restructuring the step history.
    const workerResults = await step.do('worker-step', async () => {
      const promises = orchestratorResult.tasks.map((taskPrompt: string) => {
        const workerLLMPrompt = `You are a specialised coding assistant. Please complete the following subtask:\n\n${taskPrompt}\n\nReturn your result as a JSON object in the format { "response": "Your detailed response here." }`;
        return generateObject({
          model: smallModel,
          schema: workerOutputSchema,
          prompt: workerLLMPrompt,
        });
      });
      const results = await Promise.all(promises);
      return results.map((result) => result.object.response);
    });

    // --- Step 3: Aggregator Synthesises the Worker Responses ---
    const aggregatorPrompt = `The following are responses from various workers addressing subtasks for a complex coding task:\n\n${workerResults
      .map((resp, index) => `Subtask ${index + 1}: ${resp}`)
      .join("\n\n")}\n\nPlease synthesise these responses into a single, comprehensive final result. Return your answer as a JSON object in the format { "finalResult": "Your comprehensive result here." }`;
    const aggregatorResult = await step.do('aggregator-step', async () => {
      const result = await generateObject({
        model: bigModel,
        schema: aggregatorSchema,
        prompt: aggregatorPrompt,
      });
      return result.object;
    });

    // FIX: `WorkflowStep` has no `log` method (only do/sleep/sleepUntil/waitForEvent);
    // use console.log, which Workers observability captures.
    console.log(`Aggregator result: ${JSON.stringify(aggregatorResult.finalResult)}`);

    // --- Final Step: Aggregate and Log the Complete Result ---
    const finalOutput = {
      orchestratorTasks: orchestratorResult.tasks,
      workerResponses: workerResults,
      finalResult: aggregatorResult.finalResult,
    };
    return await step.do('finalise', async () => {
      console.log(`Final output: ${JSON.stringify(finalOutput)}`);
      return finalOutput;
    });
  }
}
export default {
  /**
   * HTTP entry point.
   * - `?instanceId=...` → report the status of an existing workflow instance.
   * - otherwise         → start a new TaskWorkflow run with `?prompt=...`.
   */
  async fetch(req: Request, env: Env): Promise<Response> {
    const { searchParams } = new URL(req.url);

    // Status lookup path: an instanceId means the caller is polling a run.
    const instanceId = searchParams.get('instanceId');
    if (instanceId) {
      const existing = await env.MY_WORKFLOW.get(instanceId);
      const status = await existing.status();
      return Response.json({ status });
    }

    // Creation path: fall back to a stock prompt when none (or an empty one)
    // is supplied.
    const prompt = searchParams.get('prompt') || 'Default complex coding task prompt';
    const created = await env.MY_WORKFLOW.create({ prompt });
    return Response.json({
      id: created.id,
      details: await created.status(),
    });
  },
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment