Chapter 2 — Your First Agent
What you’ll learn: four concrete agent patterns you can copy-paste and adapt — a linear chat pipeline, conditional routing, looping with a safeguard counter, and streaming execution.
Time: ~20 minutes.
Prereqs: Chapter 1 — Setup & Core Concepts.
Simple Agents
Example 1: Linear Chat Pipeline
A basic chatbot with no branching:
from langgraph.graph import StateGraph, START, END
from langchain_anthropic import ChatAnthropic
from langgraph.graph.message import add_messages
from typing_extensions import TypedDict
from typing import Annotated


class State(TypedDict):
    """Conversation state shared by every node in the linear pipeline."""

    # Chat history; the add_messages reducer appends new messages to the
    # existing list instead of overwriting it.
    messages: Annotated[list, add_messages]
    # Display name loaded by fetch_user_context and read by call_model.
    user_name: str
# Construct the model ONCE at module scope and reuse it across every node# invocation — creating a ChatAnthropic client per call is unnecessarily# expensive in production. Nodes should stay focused on state transforms.model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
def fetch_user_context(state: State) -> dict:
    """Load user info from database.

    Returns a partial state update carrying the user's display name.
    """
    # Simulate DB lookup — a real implementation would query by
    # session or user id taken from the incoming state.
    user_record = {"user_name": "Alice"}
    return user_record
def call_model(state: State) -> dict:
    """Call the LLM with the conversation history and a per-user system prompt.

    Returns a partial state update; the add_messages reducer appends the
    response to state["messages"].
    """
    system_prompt = f"You're helping {state['user_name']}. Be concise."
    # FIX: ChatAnthropic.invoke() takes no `system_prompt` keyword — the
    # original call would fail at runtime. The system instruction must be
    # supplied as the first message in the list.
    messages = [{"role": "system", "content": system_prompt}] + state["messages"]
    response = model.invoke(messages)
    return {"messages": [response]}
def save_conversation(state: State) -> dict: """Persist messages to database.""" # Save state["messages"] to DB return {}
# Build the graph
builder = StateGraph(State)
builder.add_node("fetch_context", fetch_user_context)
builder.add_node("model", call_model)
builder.add_node("save", save_conversation)

# Wire a strictly linear path: START -> fetch_context -> model -> save -> END.
builder.add_edge(START, "fetch_context")
builder.add_edge("fetch_context", "model")
builder.add_edge("model", "save")
builder.add_edge("save", END)
# Compile with persistencefrom langgraph.checkpoint.memory import InMemorySavergraph = builder.compile(checkpointer=InMemorySaver())
# Use itconfig = {"configurable": {"thread_id": "chat-session-1"}}result = graph.invoke( {"messages": [{"role": "user", "content": "What's the weather?"}]}, config=config)
# Continue in same thread - context preservedresult = graph.invoke( {"messages": [{"role": "user", "content": "What did you say before?"}]}, config=config)Example 2: Conditional Routing
Route based on message type:
# NOTE(review): Send is imported here but never used in this example.
from langgraph.types import Send
from langgraph.graph import StateGraph, START, END


class State(TypedDict):
    """State for the conditional-routing example."""

    # Raw user query text.
    query: str
    # Set by classify_query: "search" | "math" | "general".
    query_type: str
    # Final answer produced by whichever handler node ran.
    result: str


# Reuse a single model instance across all node calls.
model = ChatAnthropic(model="claude-3-5-sonnet-20241022")
def classify_query(state: State) -> dict:
    """Classify the query as "search", "math", or "general" by keyword match."""
    text = state["query"].lower()
    # Checked in order: search keywords win over math keywords.
    keyword_table = [
        ("search", ("search", "find", "lookup")),
        ("math", ("calculate", "math", "solve")),
    ]
    for label, keywords in keyword_table:
        if any(kw in text for kw in keywords):
            return {"query_type": label}
    # No keyword matched — fall back to the general-purpose handler.
    return {"query_type": "general"}
def search_web(state: State) -> dict:
    """Handle search queries."""
    # Placeholder for a real search-API call.
    answer = "Search results for: " + state["query"]
    return {"result": answer}
def solve_math(state: State) -> dict:
    """Handle math queries."""
    # Placeholder for a real solver backend.
    answer = "Math answer for: {}".format(state["query"])
    return {"result": answer}
def general_response(state: State) -> dict:
    """Handle general queries."""
    # Fall back to the LLM for anything that is neither search nor math.
    reply = model.invoke(state["query"])
    return {"result": reply.content}
# Build graph with conditional routingbuilder = StateGraph(State)builder.add_node("classify", classify_query)builder.add_node("search", search_web)builder.add_node("math", solve_math)builder.add_node("general", general_response)
# Route based on classification
def route_to_handler(state: State) -> str:
    """Return the node name matching the classifier's verdict.

    The returned string is looked up in the mapping passed to
    add_conditional_edges.
    """
    handler = state["query_type"]
    return handler
builder.add_edge(START, "classify")
# route_to_handler's return value selects the next node via this mapping.
builder.add_conditional_edges(
    "classify",
    route_to_handler,
    {
        "search": "search",
        "math": "math",
        "general": "general"
    }
)

# All handlers lead to END
for handler in ["search", "math", "general"]:
    builder.add_edge(handler, END)

graph = builder.compile()
# Test itresult = graph.invoke({"query": "What's the population of Tokyo?"})print(result["result"]) # Routes to search
result = graph.invoke({"query": "Calculate 15% of 2000"})print(result["result"]) # Routes to mathExample 3: Looping with Counter
Agent that can loop (with limits):
class LoopState(TypedDict):
    """State for the bounded-loop example."""

    # Number of completed process_step iterations; drives loop termination.
    iteration: int
    # Accumulated payload, extended once per iteration.
    data: str
    # Set by finalize() once the loop exits.
    final_result: str
def process_step(state: LoopState) -> dict:
    """Do one iteration of processing."""
    step = state["iteration"]
    # Tag the payload with the current step number, then advance the counter.
    return {
        "data": "{} [step-{}]".format(state["data"], step),
        "iteration": step + 1,
    }
def should_continue(state: LoopState) -> str:
    """Decide whether to loop or finish."""
    # Hard cap at 3 iterations so the loop can never run away.
    return "finish" if state["iteration"] >= 3 else "continue"
def finalize(state: LoopState) -> dict:
    """Copy the accumulated data into the final_result output field."""
    accumulated = state["data"]
    return {"final_result": accumulated}
builder = StateGraph(LoopState)
builder.add_node("process", process_step)
builder.add_node("finalize", finalize)

builder.add_edge(START, "process")
# should_continue's return value picks the next hop: loop or exit.
builder.add_conditional_edges(
    "process",
    should_continue,
    {
        "continue": "process",  # Loop back to self
        "finish": "finalize"
    }
)
builder.add_edge("finalize", END)

graph = builder.compile()
# Looping with safeguardconfig = {"configurable": {"thread_id": "loop-test"}}result = graph.invoke( {"iteration": 0, "data": "start"}, config=config)print(result)# Output: {'iteration': 3, 'data': 'start [step-0] [step-1] [step-2]', 'final_result': '...'}Example 4: Streaming Output
See the graph execute step-by-step:
# Different streaming modesconfig = {"configurable": {"thread_id": "stream-test"}}
# Mode 1: "values" - full state after each stepprint("=== Streaming Values ===")for event in graph.stream( {"iteration": 0, "data": "test"}, config=config, stream_mode="values"): print(f"State: {event}\n")
# Mode 2: "updates" - only what changedprint("\n=== Streaming Updates ===")for event in graph.stream( {"iteration": 0, "data": "test"}, config=config, stream_mode="updates"): for node_name, updates in event.items(): print(f"{node_name} updated: {updates}\n")
# Mode 3: "debug" - node execution traceprint("\n=== Debug Mode ===")for event in graph.stream( {"iteration": 0, "data": "test"}, config=config, stream_mode="debug"): print(f"Debug: {event}\n")