import { v4 as uuidv4 } from "uuid";
import * as z from "zod";
import { StateGraph, START, END, MemorySaver } from "@langchain/langgraph";
import { ChatAnthropic } from "@langchain/anthropic";
// Graph state: both fields start empty and are filled in by the nodes below.
const State = z.object({
  topic: z.string().optional(),
  joke: z.string().optional(),
});
const model = new ChatAnthropic({
  model: "claude-sonnet-4-5-20250929",
  temperature: 0,
});
// Build workflow
const workflow = new StateGraph(State)
  // Add nodes
  .addNode("generateTopic", async (state) => {
    // LLM call to generate a topic for the joke
    const msg = await model.invoke("Give me a funny topic for a joke");
    return { topic: msg.content as string };
  })
  .addNode("writeJoke", async (state) => {
    // LLM call to write a joke based on the topic
    const msg = await model.invoke(`Write a short joke about ${state.topic}`);
    return { joke: msg.content as string };
  })
  // Add edges to connect nodes
  .addEdge(START, "generateTopic")
  .addEdge("generateTopic", "writeJoke")
  .addEdge("writeJoke", END);
// Compile with an in-memory checkpointer so each thread's state is persisted
const checkpointer = new MemorySaver();
const graph = workflow.compile({ checkpointer });
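
// Usage sketch (illustrative, not part of the snippet above): because the graph
// is compiled with a checkpointer, each invocation needs a thread_id in its
// config so checkpoints can be saved and replayed; the uuidv4 import supplies
// a fresh thread id here.
const config = { configurable: { thread_id: uuidv4() } };
const finalState = await graph.invoke({}, config);
console.log(finalState.topic);
console.log(finalState.joke);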