Skip to main content

Getting Started

These examples demonstrate the core concepts of Fast Agent, from simple agents to custom implementations.

Simple Agent

The most basic Fast Agent application creates an agent with an instruction and runs it interactively:
examples/setup/agent.py
import asyncio

from fast_agent import FastAgent

# Application container for this example.
app = FastAgent("fast-agent example")

# System prompt; the {{...}} placeholders are expanded by fast-agent at startup.
BASE_INSTRUCTION = """You are a helpful AI Agent.

{{serverInstructions}}
{{agentSkills}}
{{file_silent:AGENTS.md}}
{{env}}

The current date is {{currentDate}}."""

@app.agent(instruction=BASE_INSTRUCTION)
async def main():
    """Start the agent and hand control to the interactive prompt."""
    # Model selection comes from the --model command line switch or agent arguments.
    async with app.run() as session:
        await session.interactive()

if __name__ == "__main__":
    asyncio.run(main())
The {{serverInstructions}} template variable automatically includes documentation for any MCP servers connected to your agent.

Custom Agent Class

Create a custom agent by extending McpAgent or other base classes:
examples/custom-agents/agent.py
import asyncio

from fast_agent import FastAgent
from fast_agent.agents import McpAgent

app = FastAgent("fast-agent example")

class MyAgent(McpAgent):
    """McpAgent subclass that announces itself once startup completes."""

    async def initialize(self):
        # Let the base class finish its setup before adding our own step.
        await super().initialize()
        print("it's a-me!...Mario!")

@app.custom(MyAgent, instruction="You are a helpful AI Agent")
async def main():
    """Run the custom agent in interactive mode."""
    async with app.run() as session:
        await session.interactive()

if __name__ == "__main__":
    asyncio.run(main())
Override the initialize() method to add custom setup logic when your agent starts.

Tool-Based Agent

Create agents with custom Python functions as tools:
examples/tool-use-agent/agent.py
import asyncio

from fast_agent import FastAgent
from fast_agent.agents.agent_types import AgentConfig
from fast_agent.agents.tool_agent import ToolAgent
from fast_agent.context import Context

def get_video_call_transcript(video_id: str) -> str:
    """Return the transcript for the given video call (stubbed sample data)."""
    # Canned transcript; the video_id is accepted but not used by the stub.
    transcript = "Assistant: Hi, how can I assist you today?\n\nCustomer: Hi, I wanted to ask you about last invoice I received..."
    return transcript

class CustomToolAgent(ToolAgent):
    """ToolAgent preconfigured with the transcript-lookup tool."""

    def __init__(
        self,
        config: AgentConfig,
        context: Context | None = None,
    ):
        # Register the Python callables this agent may invoke as tools.
        super().__init__(config, [get_video_call_transcript], context)

app = FastAgent("Example Tool Use Application")

@app.custom(CustomToolAgent)
async def main() -> None:
    """Send a single scripted request to the tool-enabled agent."""
    async with app.run() as session:
        await session.default.generate(
            "What is the topic of the video call no.1234?",
        )

if __name__ == "__main__":
    asyncio.run(main())

Low-Level API Usage

For more control, use the Core API directly:
examples/new-api/simple_llm.py
import asyncio

from mcp.server.fastmcp import FastMCP
from fast_agent.agents.agent_types import AgentConfig
from fast_agent.agents.tool_agent import ToolAgent
from fast_agent.core import Core
from fast_agent.llm.model_factory import ModelFactory

# MCP server exposing the example's weather tool; keep logging quiet.
mcp = FastMCP("Weather Bot", log_level="WARNING")

@mcp.tool()
async def check_weather(city: str) -> str:
    """Check the weather in a given city."""
    # Stubbed report — always sunny, regardless of the city.
    report = f"The weather in {city} is sunny."
    return report

def get_temperature(city: str) -> int:
    """Get the temperature in a city."""
    # Stub implementation: every city reports 22 degrees.
    return 22

async def main():
    """Build a ToolAgent on the low-level Core API and run one request.

    Demonstrates manual wiring: create a Core, configure an agent, attach
    an LLM, send a prompt, and clean up.
    """
    core: Core = Core()
    await core.initialize()
    try:
        config = AgentConfig(name="weather_bot", model="haiku")

        tool_agent = ToolAgent(
            config,
            tools=[check_weather, get_temperature],
            context=core.context,
        )

        # Attach an LLM matching the configured model, then issue the request.
        await tool_agent.attach_llm(ModelFactory.create_factory("haiku"))
        await tool_agent.send("What's the weather like in San Francisco and what's the temperature?")
    finally:
        # Release Core resources even if attach/send raises (the original
        # skipped cleanup on error).
        await core.cleanup()

if __name__ == "__main__":
    asyncio.run(main())

MCP Agent with Dynamic Configuration

Connect to MCP servers dynamically:
examples/new-api/simple_mcp.py
import asyncio

from fast_agent.agents.agent_types import AgentConfig
from fast_agent.agents.mcp_agent import McpAgent
from fast_agent.core.core_app import Core
from fast_agent.llm.model_factory import ModelFactory

async def main():
    """Create an MCP-connected agent programmatically and run one query.

    Shows dynamic configuration: the agent is pointed at the "fetch" MCP
    server via AgentConfig rather than a config file.
    """
    core: Core = Core()
    await core.initialize()
    try:
        config = AgentConfig(
            name="dynamic_bot",
            model="gpt-4o-mini",
            servers=["fetch"],  # Connect to fetch MCP server
        )

        agent = McpAgent(
            config,
            connection_persistence=True,  # keep server connections open between calls
            context=core.context,
        )

        await agent.attach_llm(ModelFactory.create_factory("gpt-4o-mini"))
        await agent.initialize()

        result = await agent.send(
            "Tell me about fast-agent framework. Find info at https://fast-agent.ai/ and summarize it."
        )
        print(result)
    finally:
        # Tear down Core resources even when the request raises (the original
        # skipped cleanup on error).
        await core.cleanup()

if __name__ == "__main__":
    asyncio.run(main())

Data Analysis with Python Interpreter

Use the interpreter MCP server for data analysis:
examples/data-analysis/analysis.py
import asyncio
from fast_agent import FastAgent

fast = FastAgent("Data Analysis (Roots)")

# Agent with access to the "interpreter" MCP server for running Python code.
@fast.agent(
    name="data_analysis",
    instruction="""
You have access to a Python 3.12 interpreter and you can use this to analyse and process data. 
Common analysis packages such as Pandas, Seaborn and Matplotlib are already installed. 
You can add further packages if needed.
Data files are accessible from the /mnt/data/ directory (this is the current working directory).
Visualisations should be saved as .png files in the current working directory.

{{serverInstructions}}
""",
    servers=["interpreter"],
)
async def main() -> None:
    async with fast.run() as agent:
        # NOTE(review): interactive() blocks until the user exits, so the
        # scripted calls below only run afterwards — confirm this ordering
        # is intended.
        await agent.interactive()
        # Scripted prompts addressed to the named "data_analysis" agent.
        await agent.data_analysis(
            "There is a csv file in the current directory. "
            "Analyse the file, produce a detailed description of the data, and any patterns it contains.",
        )
        await agent.data_analysis(
            "Consider the data, and how to usefully group it for presentation to a Human. Find insights, using the Python Interpreter as needed.\n"
            "Use MatPlotLib to produce insightful visualisations. Save them as '.png' files in the current directory. Be sure to run the code and save the files.\n"
            "Produce a summary with major insights to the data",
        )
        # NOTE(review): presumably re-opens a chat session after the scripted
        # analysis — verify against the fast-agent API.
        await agent()

if __name__ == "__main__":
    asyncio.run(main())

Multimodal: Video Analysis

Work with video content using resource links:
examples/multimodal/video.py
import asyncio

from fast_agent import FastAgent, text_content, video_link
from fast_agent.types import PromptMessageExtended

fast = FastAgent("Video Resource Test")

@fast.agent()
async def main():
    """Send a multimodal message combining text and a linked video resource."""
    async with fast.run() as agent:
        # One user message carrying both a text part and a video link part.
        message = PromptMessageExtended(
            role="user",
            content=[
                # Fixed stray "." after the question mark in the prompt text.
                text_content("What happens in this video?"),
                video_link("https://www.youtube.com/watch?v=dQw4w9WgXcQ", name="Mystery Video"),
            ],
        )
        await agent.default.generate([message])

if __name__ == "__main__":
    asyncio.run(main())

Next Steps

Workflow Examples

Learn about orchestrators, routers, and parallel workflows

MCP Examples

Explore MCP server integration and elicitations

Advanced Patterns

Discover hooks, RAG, and production patterns

API Reference

Explore the complete API documentation