import { createDeepAgent } from "deepagents";process.env.OPENAI_API_KEY = "your-api-key";const agent = createDeepAgent({ model: "gpt-4.1" });// this calls initChatModel for the specified model with default parameters// to use specific model parameters, use initChatModel directly
import { createDeepAgent } from "deepagents";process.env.ANTHROPIC_API_KEY = "your-api-key";const agent = createDeepAgent({ model: "claude-sonnet-4-5-20250929" });// this calls initChatModel for the specified model with default parameters// to use specific model parameters, use initChatModel directly
import { createDeepAgent } from "deepagents";process.env.AZURE_OPENAI_API_KEY = "your-api-key";process.env.AZURE_OPENAI_ENDPOINT = "your-endpoint";process.env.OPENAI_API_VERSION = "your-api-version";const agent = createDeepAgent({ model: "azure_openai:gpt-4.1" });// this calls initChatModel for the specified model with default parameters// to use specific model parameters, use initChatModel directly
import { createDeepAgent } from "deepagents";process.env.GOOGLE_API_KEY = "your-api-key";const agent = createDeepAgent({ model: "google-genai:gemini-2.5-flash-lite" });// this calls initChatModel for the specified model with default parameters// to use specific model parameters, use initChatModel directly
import { createDeepAgent } from "deepagents";// Follow the steps here to configure your credentials:// https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started.htmlconst agent = createDeepAgent({ model: "bedrock:gpt-4.1" });// this calls initChatModel for the specified model with default parameters// to use specific model parameters, use initChatModel directly
Deep agents come with a built-in system prompt. The default system prompt contains detailed instructions for using the built-in planning tool, file system tools, and subagents.
When middleware adds special tools, such as the filesystem tools, it appends instructions for those tools to the system prompt. Each deep agent should also include a custom system prompt tailored to its use case:
import { createDeepAgent } from "deepagents";const researchInstructions = `You are an expert researcher. ` + `Your job is to conduct thorough research, and then ` + `write a polished report.`;const agent = createDeepAgent({ systemPrompt: researchInstructions,});
Deep agent tools can make use of virtual file systems to store, access, and edit files. By default, deep agents use a StateBackend. If you are using skills or memory, you must add the expected skill or memory files to the backend before creating the agent.
Deep agents support the following filesystem backends: StateBackend, FilesystemBackend, StoreBackend, and CompositeBackend.
StateBackend: An ephemeral filesystem backend stored in LangGraph state. This filesystem only persists for a single thread.
```python
from deepagents import create_deep_agent

# By default we provide a StateBackend
agent = create_deep_agent()

# Under the hood, it looks like
from deepagents.backends import StateBackend

agent = create_deep_agent(
    backend=(lambda rt: StateBackend(rt))  # Note that the tools access state through runtime.state
)
```
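For the JS package the default is the same: omit `backend` and the agent uses the in-state filesystem, seeding virtual files per invocation through the `files` key (as the skills and memory examples below do). A minimal sketch under those assumptions:

```typescript
import { createDeepAgent, type FileData } from "deepagents";

// No backend argument: files live in agent state and persist only within a single thread.
const agent = createDeepAgent({
  systemPrompt: "You are a helpful assistant.",
});

const now = new Date().toISOString();
const notes: FileData = {
  content: ["Scratch notes seeded into the ephemeral in-state filesystem."],
  created_at: now,
  modified_at: now,
};

// Seed virtual files per invocation via the `files` key (paths start with "/").
const result = await agent.invoke({
  messages: [{ role: "user", content: "Summarize the file /notes.txt." }],
  files: { "/notes.txt": notes },
});
```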
FilesystemBackend: The local machine's filesystem. This backend grants agents direct filesystem read/write access, so use it with caution and only in appropriate environments. For more information, see FilesystemBackend.
```python
from deepagents import create_deep_agent
from deepagents.backends import FilesystemBackend

agent = create_deep_agent(
    backend=FilesystemBackend(root_dir=".", virtual_mode=True)
)
```
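In the JS package, the corresponding backend is exported from `deepagents` and is used the same way in the skills and memory examples below. A minimal sketch, scoping access to the current working directory:

```typescript
import { createDeepAgent, FilesystemBackend } from "deepagents";

// The agent's file tools read and write real files under rootDir, so scope it deliberately.
const backend = new FilesystemBackend({ rootDir: process.cwd() });

const agent = await createDeepAgent({ backend });
```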
StoreBackend: A filesystem that provides long-term storage persisted across threads.
```python
from langgraph.store.memory import InMemoryStore

from deepagents import create_deep_agent
from deepagents.backends import StoreBackend

agent = create_deep_agent(
    backend=(lambda rt: StoreBackend(rt)),
    store=InMemoryStore()  # Good for local dev; omit for LangSmith Deployment
)
```
When deploying to LangSmith Deployment, omit the store parameter. The platform automatically provisions a store for your agent.
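The JS package mirrors this with a backend factory, as in the skills and memory examples later on this page. A minimal sketch using an in-memory store for local development:

```typescript
import { createDeepAgent, StoreBackend } from "deepagents";
import { InMemoryStore, type BaseStore } from "@langchain/langgraph";

// InMemoryStore is good for local development; on LangSmith Deployment, omit `store`
// (the platform provisions one for you).
const store = new InMemoryStore();

const backendFactory = (config: { state: unknown; store?: BaseStore }) =>
  new StoreBackend({ state: config.state, store: config.store ?? store });

const agent = await createDeepAgent({
  backend: backendFactory,
  store,
});
```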
CompositeBackend: A flexible backend that routes different filesystem paths to different backends.
```python
from deepagents import create_deep_agent
from deepagents.backends import CompositeBackend, StateBackend, StoreBackend
from langgraph.store.memory import InMemoryStore

composite_backend = lambda rt: CompositeBackend(
    default=StateBackend(rt),
    routes={
        "/memories/": StoreBackend(rt),
    },
)

agent = create_deep_agent(
    backend=composite_backend,
    store=InMemoryStore()  # Store passed to create_deep_agent, not backend
)
```
Sandboxes are specialized backends that run agent code in an isolated environment with their own filesystem and an execute tool for shell commands.
Use a sandbox backend when you want your deep agent to write files, install dependencies, and run commands without changing anything on your local machine. You configure sandboxes by passing a sandbox backend to the backend parameter when creating your deep agent:
import { createDeepAgent } from "deepagents";import { ChatAnthropic } from "@langchain/anthropic";import { DenoSandbox } from "@langchain/deno";// Create and initialize the sandboxconst sandbox = await DenoSandbox.create({ memoryMb: 1024, lifetime: "10m",});try { const agent = createDeepAgent({ model: new ChatAnthropic({ model: "claude-opus-4-6" }), systemPrompt: "You are a JavaScript coding assistant with sandbox access.", backend: sandbox, }); const result = await agent.invoke({ messages: [ { role: "user", content: "Create a simple HTTP server using Deno.serve and test it with curl", }, ], });} finally { await sandbox.close();}
You can use skills to provide your deep agent with new capabilities and expertise.
While tools tend to cover lower-level functionality such as native file system actions or planning, skills can contain detailed instructions on how to complete tasks, reference information, and other assets such as templates.
The agent loads these files only when it determines that the skill is useful for the current prompt.
This progressive disclosure reduces the number of tokens and the amount of context the agent has to consider on startup. For example skills, see Deep Agent example skills. To add skills to your deep agent, pass them as an argument to createDeepAgent:
The following examples show how to load skills with a StateBackend, a StoreBackend, or a FilesystemBackend:
import { createDeepAgent, type FileData } from "deepagents";import { MemorySaver } from "@langchain/langgraph";const checkpointer = new MemorySaver();function createFileData(content: string): FileData { const now = new Date().toISOString(); return { content: content.split("\n"), created_at: now, modified_at: now, };}const skillsFiles: Record<string, FileData> = {};const skillUrl = "https://raw.githubusercontent.com/langchain-ai/deepagentsjs/refs/heads/main/examples/skills/langgraph-docs/SKILL.md";const response = await fetch(skillUrl);const skillContent = await response.text();skillsFiles["/skills/langgraph-docs/SKILL.md"] = createFileData(skillContent);const agent = await createDeepAgent({ checkpointer, // IMPORTANT: deepagents skill source paths are virtual (POSIX) paths relative to the backend root. skills: ["/skills/"],});const config = { configurable: { thread_id: `thread-${Date.now()}`, },};const result = await agent.invoke( { messages: [ { role: "user", content: "what is langraph? Use the langgraph-docs skill if available.", }, ], files: skillsFiles, }, config,);
import { createDeepAgent, StoreBackend, type FileData } from "deepagents";import { InMemoryStore, MemorySaver, type BaseStore,} from "@langchain/langgraph";const checkpointer = new MemorySaver();const store = new InMemoryStore();function createFileData(content: string): FileData { const now = new Date().toISOString(); return { content: content.split("\n"), created_at: now, modified_at: now, };}const skillUrl = "https://raw.githubusercontent.com/langchain-ai/deepagentsjs/refs/heads/main/examples/skills/langgraph-docs/SKILL.md";const response = await fetch(skillUrl);const skillContent = await response.text();const fileData = createFileData(skillContent);await store.put(["filesystem"], "/skills/langgraph-docs/SKILL.md", fileData);const backendFactory = (config: { state: unknown; store?: BaseStore }) => { return new StoreBackend({ state: config.state, store: config.store ?? store, });};const agent = await createDeepAgent({ backend: backendFactory, store: store, checkpointer, // IMPORTANT: deepagents skill source paths are virtual (POSIX) paths relative to the backend root. skills: ["/skills/"],});const config = { recursionLimit: 50, configurable: { thread_id: `thread-${Date.now()}`, },};const result = await agent.invoke( { messages: [ { role: "user", content: "what is langraph? Use the langgraph-docs skill if available.", }, ], }, config,);
import { createDeepAgent, FilesystemBackend } from "deepagents";import { MemorySaver } from "@langchain/langgraph";const checkpointer = new MemorySaver();const backend = new FilesystemBackend({ rootDir: process.cwd() });const agent = await createDeepAgent({ backend, skills: ["./examples/skills/"], interruptOn: { read_file: true, write_file: true, delete_file: true, }, checkpointer, // Required!});const config = { configurable: { thread_id: `thread-${Date.now()}`, },};const result = await agent.invoke( { messages: [ { role: "user", content: "what is langraph? Use the langgraph-docs skill if available.", }, ], }, config,);
Use AGENTS.md files to provide extra context to your deep agent. You can pass one or more file paths to the memory parameter when creating your deep agent:
The following examples show how to provide memory files with a StateBackend, a StoreBackend, or a FilesystemBackend:
import { createDeepAgent, type FileData } from "deepagents";import { MemorySaver } from "@langchain/langgraph";const AGENTS_MD_URL = "https://raw.githubusercontent.com/langchain-ai/deepagents/refs/heads/main/examples/text-to-sql-agent/AGENTS.md";async function fetchText(url: string): Promise<string> { const res = await fetch(url); if (!res.ok) { throw new Error(`Failed to fetch ${url}: ${res.status} ${res.statusText}`); } return await res.text();}const agentsMd = await fetchText(AGENTS_MD_URL);const checkpointer = new MemorySaver();function createFileData(content: string): FileData { const now = new Date().toISOString(); return { content: content.split("\n"), created_at: now, modified_at: now, };}const agent = await createDeepAgent({ memory: ["/AGENTS.md"], checkpointer: checkpointer,});const result = await agent.invoke( { messages: [ { role: "user", content: "Please tell me what's in your memory files.", }, ], // Seed the default StateBackend's in-state filesystem (virtual paths must start with "/"). files: { "/AGENTS.md": createFileData(agentsMd) }, }, { configurable: { thread_id: "12345" } });
import { createDeepAgent, StoreBackend, type FileData } from "deepagents"; import { InMemoryStore, MemorySaver, type BaseStore, } from "@langchain/langgraph"; const AGENTS_MD_URL = "https://raw.githubusercontent.com/langchain-ai/deepagents/refs/heads/main/examples/text-to-sql-agent/AGENTS.md"; async function fetchText(url: string): Promise<string> { const res = await fetch(url); if (!res.ok) { throw new Error(`Failed to fetch ${url}: ${res.status} ${res.statusText}`); } return await res.text(); } const agentsMd = await fetchText(AGENTS_MD_URL); function createFileData(content: string): FileData { const now = new Date().toISOString(); return { content: content.split("\n"), created_at: now, modified_at: now, }; } const store = new InMemoryStore(); const fileData = createFileData(agentsMd); await store.put(["filesystem"], "/AGENTS.md", fileData); const checkpointer = new MemorySaver(); const backendFactory = (config: { state: unknown; store?: BaseStore }) => { return new StoreBackend({ state: config.state, store: config.store ?? store, }); }; const agent = await createDeepAgent({ backend: backendFactory, store: store, checkpointer: checkpointer, memory: ["/AGENTS.md"], }); const result = await agent.invoke( { messages: [ { role: "user", content: "Please tell me what's in your memory files.", }, ], }, { configurable: { thread_id: "12345" } } );
import { createDeepAgent, FilesystemBackend } from "deepagents";import { MemorySaver } from "@langchain/langgraph";// Checkpointer is REQUIRED for human-in-the-loopconst checkpointer = new MemorySaver();const agent = await createDeepAgent({ backend: (config) => new FilesystemBackend({ rootDir: "/Users/user/{project}" }), memory: ["./AGENTS.md", "./.deepagents/AGENTS.md"], interruptOn: { read_file: true, write_file: true, delete_file: true, }, checkpointer, // Required!});
Deep agents support structured output. You can set a desired structured output schema by passing it as the responseFormat argument to createDeepAgent().
When the model generates the structured data, it is captured, validated, and returned in the structuredResponse key of the agent's state.
import { tool } from "langchain";import { TavilySearch } from "@langchain/tavily";import { createDeepAgent } from "deepagents";import { z } from "zod";const internetSearch = tool( async ({ query, maxResults = 5, topic = "general", includeRawContent = false, }: { query: string; maxResults?: number; topic?: "general" | "news" | "finance"; includeRawContent?: boolean; }) => { const tavilySearch = new TavilySearch({ maxResults, tavilyApiKey: process.env.TAVILY_API_KEY, includeRawContent, topic, }); return await tavilySearch._call({ query }); }, { name: "internet_search", description: "Run a web search", schema: z.object({ query: z.string().describe("The search query"), maxResults: z.number().optional().default(5), topic: z .enum(["general", "news", "finance"]) .optional() .default("general"), includeRawContent: z.boolean().optional().default(false), }), });const weatherReportSchema = z.object({ location: z.string().describe("The location for this weather report"), temperature: z.number().describe("Current temperature in Celsius"), condition: z .string() .describe("Current weather condition (e.g., sunny, cloudy, rainy)"), humidity: z.number().describe("Humidity percentage"), windSpeed: z.number().describe("Wind speed in km/h"), forecast: z.string().describe("Brief forecast for the next 24 hours"),});const agent = await createDeepAgent({ responseFormat: weatherReportSchema, tools: [internetSearch],});const result = await agent.invoke({ messages: [ { role: "user", content: "What's the weather like in San Francisco?", }, ],});console.log(result.structuredResponse);// {// location: 'San Francisco, California',// temperature: 18.3,// condition: 'Sunny',// humidity: 48,// windSpeed: 7.6,// forecast: 'Clear skies with temperatures remaining mild. High of 18°C (64°F) during the day, dropping to around 11°C (52°F) at night.'// }