Alpha Notice: These docs cover the v1-alpha release. Content is incomplete and subject to change. For the latest stable version, see the current LangChain Python or LangChain JavaScript docs.
Super quick start
from langchain.agents import create_agent

def get_weather(city: str) -> str:
    """Get weather for a given city."""
    return f"It's always sunny in {city}!"

agent = create_agent(
    model="anthropic:claude-3-7-sonnet-latest",
    tools=[get_weather],
    prompt="You are a helpful assistant",
)

# Run the agent
result = agent.invoke(
    {"messages": [{"role": "user", "content": "what is the weather in sf"}]}
)
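
The result is the agent's full state; the final reply is the last entry in its messages list, so you can print it directly:

# The last message in the returned state is the model's final reply.
print(result["messages"][-1].content)
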
We will now make this slightly more involved, walking through the core concepts of LangChain agents. Specifically, we will:
  1. Add a more detailed prompt
  2. Add more real-world tools that read from external data sources
  3. Configure more model settings
  4. Set a response format
  5. Add conversational memory so we can chat across multiple turns
# Step 1: define system prompt

system_prompt = """You are an expert weather forecaster, who speaks in puns.

You have access to two tools:

- get_weather: use this to get the weather for a specific city
- get_user_location: use this to get the user's location

If a user asks you for the weather, make sure you know the location. If you can tell from the question that they mean wherever they are, use the get_user_location tool to find their location."""

# Step 2: define tools
from langchain_core.tools import tool

@tool
def get_weather(city: str) -> str:
    """Get weather for a given city."""
    return f"It's always sunny in {city}!"

from langchain_core.runnables import RunnableConfig

# Hard-coded user locations keyed by user ID (for demo purposes)
USER_LOCATION = {
    "1": "Florida",
    "2": "SF"
}

@tool
def get_user_location(config: RunnableConfig) -> str:
    """Retrieve the user's location based on their user ID."""
    # The context passed to invoke(..., context=...) is available on the config.
    user_id = config["context"].get("user_id")
    return USER_LOCATION[user_id]
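
Tools built with the @tool decorator are runnables in their own right, so you can sanity-check them before handing them to the agent:

# @tool-decorated functions can be invoked directly for a quick sanity check.
print(get_weather.invoke({"city": "SF"}))  # "It's always sunny in SF!"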


# Step 3: define model

from langchain.chat_models import init_chat_model

model = init_chat_model(
    "anthropic:claude-3-7-sonnet-latest",
    temperature=0
)
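
The object returned by init_chat_model is a regular chat model, so a direct call is an easy way to verify your credentials and settings before wiring it into the agent:

# A direct call is a quick smoke test for the model configuration.
reply = model.invoke("Say hello in one short sentence.")
print(reply.content)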

# Step 4: define response format

from dataclasses import dataclass

@dataclass
class WeatherResponse:
    conditions: str
    punny_response: str
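
A plain dataclass is enough here; if you want field validation, a Pydantic model is a common alternative (a sketch, assuming response_format accepts Pydantic models the way structured-output schemas elsewhere in LangChain do):

# Sketch of a Pydantic alternative to the dataclass
# (assumes response_format also accepts Pydantic models).
from pydantic import BaseModel

class WeatherResponseModel(BaseModel):
    conditions: str
    punny_response: str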

# Step 5: define checkpointer
from langgraph.checkpoint.memory import InMemorySaver

checkpointer = InMemorySaver()
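
InMemorySaver keeps conversation state in process memory, so it is lost on restart; for real deployments you would swap in a database-backed checkpointer. A sketch using the separately installed langgraph-checkpoint-postgres package (the connection string is a placeholder):

# Sketch: persistent checkpointer (pip install langgraph-checkpoint-postgres).
# The connection string below is a placeholder for your own database.
from langgraph.checkpoint.postgres import PostgresSaver

with PostgresSaver.from_conn_string("postgresql://user:pass@localhost:5432/db") as pg_checkpointer:
    pg_checkpointer.setup()  # create the required tables on first run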

# Step 6: create agent
from langchain.agents import create_agent

agent = create_agent(
    model=model,
    prompt=system_prompt,
    tools=[get_user_location, get_weather],
    response_format=WeatherResponse,
    checkpointer=checkpointer
)

config = {"configurable": {"thread_id": "1"}}  # thread_id keys the conversation for the checkpointer
context = {"user_id": "1"}  # runtime context made available to the tools
response = agent.invoke(
    {"messages": [{"role": "user", "content": "what is the weather outside?"}]},
    config=config,
    context=context
)

response['structured_response']

# Follow-up message on the same thread; the checkpointer restores the earlier turns.
response = agent.invoke(
    {"messages": [{"role": "user", "content": "thank you!"}]},
    config=config,
    context=context
)

response['structured_response']
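
Because response_format was set to the WeatherResponse dataclass, structured_response is an instance of it, so its fields are ordinary attributes:

# structured_response is a WeatherResponse instance; fields are attributes.
structured = response["structured_response"]
print(structured.conditions)      # e.g. a short description of the weather
print(structured.punny_response)  # the pun-laden summary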