import uuid
from typing_extensions import TypedDict, NotRequired
from langgraph.graph import StateGraph, START, END
from langchain.chat_models import init_chat_model
from langgraph.checkpoint.memory import InMemorySaver

class State(TypedDict):
    topic: NotRequired[str]
    joke: NotRequired[str]


model = init_chat_model(
    "claude-sonnet-4-5-20250929",
    temperature=0,
)


def generate_topic(state: State):
    """LLM call to generate a topic for the joke"""
    msg = model.invoke("Give me a funny topic for a joke")
    return {"topic": msg.content}


def write_joke(state: State):
    """LLM call to write a joke based on the topic"""
    msg = model.invoke(f"Write a short joke about {state['topic']}")
    return {"joke": msg.content}

# Build workflow
workflow = StateGraph(State)

# Add nodes
workflow.add_node("generate_topic", generate_topic)
workflow.add_node("write_joke", write_joke)

# Add edges to connect nodes
workflow.add_edge(START, "generate_topic")
workflow.add_edge("generate_topic", "write_joke")
workflow.add_edge("write_joke", END)

# Compile
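# InMemorySaver stores checkpoints in memory: the graph state is saved after
# each node runs, so a run can later be resumed or replayed by its thread_id.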
checkpointer = InMemorySaver()
graph = workflow.compile(checkpointer=checkpointer)
graph
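
# A minimal usage sketch (illustrative, not part of the listing above): because
# the graph is compiled with a checkpointer, each run needs a thread_id in its
# config so the checkpoints have a thread to attach to.
config = {"configurable": {"thread_id": uuid.uuid4()}}
state = graph.invoke({}, config)
print(state["topic"])
print(state["joke"])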