import { v4 as uuidv4 } from "uuid";
import * as z from "zod";
import { StateGraph, StateSchema, GraphNode, START, END, MemorySaver } from "@langchain/langgraph";
import { ChatAnthropic } from "@langchain/anthropic";
// Shared graph state threaded through every node of the pipeline.
// Both fields begin undefined and are filled in as the graph runs:
// `topic` by generateTopic, `joke` by writeJoke.
const State = new StateSchema({
  topic: z.optional(z.string()),
  joke: z.optional(z.string()),
});
// Anthropic chat model shared by all nodes in this graph.
// temperature 0 requests minimally random sampling from the API.
const model = new ChatAnthropic({
  model: "claude-sonnet-4-5-20250929",
  temperature: 0,
});
/**
 * First node: ask the LLM for a joke topic and store it in state.
 *
 * Fix: `msg.content` is `string | MessageContentComplex[]` in LangChain JS,
 * while the State schema declares `topic` as a plain string. Normalize the
 * content to a string before writing it into state instead of assigning the
 * union type directly.
 */
const generateTopic: GraphNode<typeof State> = async (state) => {
  // LLM call to generate a topic for the joke
  const msg = await model.invoke("Give me a funny topic for a joke");
  // Flatten complex content blocks down to their text parts.
  const topic =
    typeof msg.content === "string"
      ? msg.content
      : msg.content
          .map((block) => ("text" in block ? block.text : ""))
          .join("");
  return { topic };
};
/**
 * Second node: write a short joke about the previously generated topic.
 *
 * Fix: `msg.content` is `string | MessageContentComplex[]` in LangChain JS,
 * while the State schema declares `joke` as a plain string. Normalize the
 * content to a string before writing it into state instead of assigning the
 * union type directly.
 *
 * NOTE(review): `state.topic` is optional in the schema; graph wiring
 * guarantees generateTopic runs first, so it is assumed set here.
 */
const writeJoke: GraphNode<typeof State> = async (state) => {
  // LLM call to write a joke based on the topic
  const msg = await model.invoke(`Write a short joke about ${state.topic}`);
  // Flatten complex content blocks down to their text parts.
  const joke =
    typeof msg.content === "string"
      ? msg.content
      : msg.content
          .map((block) => ("text" in block ? block.text : ""))
          .join("");
  return { joke };
};
// Build workflow: a strictly sequential two-node pipeline.
// START -> generateTopic -> writeJoke -> END
// (The builder chain's return type accumulates node names, so the
// addNode/addEdge call order matters for type checking.)
const workflow = new StateGraph(State)
  // Add nodes
  .addNode("generateTopic", generateTopic)
  .addNode("writeJoke", writeJoke)
  // Add edges to connect nodes
  .addEdge(START, "generateTopic")
  .addEdge("generateTopic", "writeJoke")
  .addEdge("writeJoke", END);
// Compile the graph with an in-memory checkpointer so state snapshots are
// retained per thread across invocations (process-local only; not durable).
const checkpointer = new MemorySaver();
const graph = workflow.compile({ checkpointer });