Agent with Tool Calling
Overview
Section titled “Overview”

An agent is a prompt that can call your functions. The flow:
- You define tools in the
.prompty frontmatter - You register the matching functions in your code
- The runtime sends the tools to the LLM
- If the LLM returns a
tool_calls response, the runtime calls your function, appends the result to the conversation, and calls the LLM again - This loops until the LLM returns a normal text response
User message → LLM (with tool definitions) → tool_calls: get_weather("Seattle") → Your function returns "72°F and sunny" → LLM (with tool result in context) → "The weather in Seattle is 72°F and sunny!"

1. Define Your Tool Functions
Section titled “1. Define Your Tool Functions”from prompty import tool
@tooldef get_weather(city: str) -> str: """Get the current weather for a city.""" return f"72°F and sunny in {city}"
@tooldef get_time(timezone: str) -> str: """Get the current time in a timezone.""" from datetime import datetime, timezone as tz return datetime.now(tz.utc).isoformat()import { tool } from "@prompty/core";
const getWeather = tool( (city: string) => `72°F and sunny in ${city}`, { name: "get_weather", description: "Get the current weather for a city", parameters: [{ name: "city", kind: "string", required: true }], },);
const getTime = tool( (timezone: string) => new Date().toISOString(), { name: "get_time", description: "Get the current time in a timezone", parameters: [{ name: "timezone", kind: "string", required: true }], },);using Prompty.Core;
public class AgentTools{ [Tool(Name = "get_weather", Description = "Get the current weather")] public string GetWeather(string city) { return $"72°F and sunny in {city}"; }
[Tool(Name = "get_time", Description = "Get the current time")] public string GetTime(string timezone) { return DateTime.UtcNow.ToString("o"); }}prompty::register_tool_handler("get_weather", |args| { Box::pin(async move { let city = args["city"].as_str().unwrap_or("unknown"); Ok(serde_json::json!(format!("72°F and sunny in {city}"))) })});
prompty::register_tool_handler("get_time", |args| { Box::pin(async move { let _tz = args["timezone"].as_str().unwrap_or("UTC"); Ok(serde_json::json!(chrono::Utc::now().to_rfc3339())) })});

2. Write the .prompty File
Section titled “2. Write the .prompty File”Create agent.prompty:
---name: weather-agentdescription: An agent that can check weather and timemodel: id: gpt-4o-mini provider: openai apiType: chat connection: kind: key apiKey: ${env:OPENAI_API_KEY} options: temperature: 0tools: - name: get_weather kind: function description: Get the current weather for a city parameters: - name: city kind: string description: The city name, e.g. "Seattle" required: true - name: get_time kind: function description: Get the current time in a timezone parameters: - name: timezone kind: string description: IANA timezone, e.g. "America/New_York" required: trueinputs: - name: question kind: string default: What's the weather in Seattle?---system:You are a helpful assistant with access to weather and time tools.Always use the tools when the user asks about weather or time.
user:{{question}}

3. Run the Agent
Section titled “3. Run the Agent”from prompty import load, turn, tool, bind_tools
# Define your tool functions with @tool@tooldef get_weather(city: str) -> str: """Get the current weather for a city.""" return f"72°F and sunny in {city}"
@tooldef get_time(timezone: str) -> str: """Get the current time in a timezone.""" from datetime import datetime, timezone as tz return datetime.now(tz.utc).isoformat()
# Load and validate tools against the .prompty declarationsagent = load("agent.prompty")tools = bind_tools(agent, [get_weather, get_time])
# Execute with the agent loop — tools are called automaticallyresult = turn( agent, inputs={"question": "What's the weather in Seattle and the time in Tokyo?"}, tools=tools,)print(result)# → "The weather in Seattle is 72°F and sunny, and the current time in Tokyo is ..."import { load, turn, tool, bindTools } from "@prompty/core";
const getWeather = tool( (city: string) => `72°F and sunny in ${city}`, { name: "get_weather", description: "Get the current weather for a city", parameters: [{ name: "city", kind: "string", required: true }], },);
const getTime = tool( (timezone: string) => new Date().toISOString(), { name: "get_time", description: "Get the current time in a timezone", parameters: [{ name: "timezone", kind: "string", required: true }], },);
// Load and validate tools against the .prompty declarationsconst agent = await load("agent.prompty");const tools = bindTools(agent, [getWeather, getTime]);
const result = await turn(agent, { question: "What's the weather in Seattle?",}, { tools });
console.log(result);using Prompty.Core;
public class WeatherTools{ [Tool(Name = "get_weather", Description = "Get the current weather")] public string GetWeather(string city) { return $"72°F and sunny in {city}"; }
[Tool(Name = "get_time", Description = "Get the current time")] public string GetTime(string timezone) { return DateTime.UtcNow.ToString("o"); }}
// Load and validate [Tool] methods against the .prompty declarationsvar agent = PromptyLoader.Load("agent.prompty");var service = new WeatherTools();var tools = ToolAttribute.BindTools(agent, service);
var result = await Pipeline.TurnAsync( agent, new() { ["question"] = "What's the weather in Seattle and the time in Tokyo?" }, tools: tools);
Console.WriteLine(result);use serde_json::json;use prompty::TurnOptions;
#[tokio::main]async fn main() -> Result<(), Box<dyn std::error::Error>> { prompty::register_defaults(); prompty_openai::register();
prompty::register_tool_handler("get_weather", |args| { Box::pin(async move { let city = args["city"].as_str().unwrap_or("unknown"); Ok(json!(format!("72°F and sunny in {city}"))) }) });
prompty::register_tool_handler("get_time", |args| { Box::pin(async move { let _tz = args["timezone"].as_str().unwrap_or("UTC"); Ok(json!(chrono::Utc::now().to_rfc3339())) }) });
let result = prompty::turn_from_path( "agent.prompty", Some(&json!({ "question": "What's the weather in Seattle and the time in Tokyo?" })), Some(TurnOptions { max_iterations: Some(10), ..Default::default() }), ).await?; println!("{result}"); Ok(())}

4. Async Tool Functions
Section titled “4. Async Tool Functions”If your tools call external APIs, use async functions to avoid blocking:
import httpxfrom prompty import load, turn_async, tool, bind_tools
@toolasync def get_weather(city: str) -> str: """Get the current weather for a city.""" async with httpx.AsyncClient() as client: resp = await client.get(f"https://api.weather.com/v1/{city}") data = resp.json() return f"{data['temp']}°F, {data['condition']}"
async def main(): agent = load("agent.prompty") tools = bind_tools(agent, [get_weather]) result = await turn_async( agent, inputs={"question": "Weather in London?"}, tools=tools, ) print(result)import { load, turn, tool, bindTools } from "@prompty/core";
const getWeather = tool( async (city: string) => { const resp = await fetch(`https://api.weather.com/v1/${city}`); const data = await resp.json(); return `${data.temp}°F, ${data.condition}`; }, { name: "get_weather", description: "Get the current weather for a city", parameters: [{ name: "city", kind: "string", required: true }], },);
const agent = await load("agent.prompty");const tools = bindTools(agent, [getWeather]);
const result = await turn(agent, { question: "Weather in London?",}, { tools });using Prompty.Core;
public class AsyncWeatherTools{ [Tool(Name = "get_weather", Description = "Get the current weather")] public async Task<string> GetWeather(string city) { using var client = new HttpClient(); var resp = await client.GetStringAsync( $"https://api.weather.com/v1/{city}"); return resp; }}
var agent = PromptyLoader.Load("agent.prompty");var service = new AsyncWeatherTools();var tools = ToolAttribute.BindTools(agent, service);
var result = await Pipeline.TurnAsync( agent, new() { ["question"] = "Weather in London?" }, tools: tools);

prompty::register_tool_handler("get_weather", |args| { Box::pin(async move { let city = args["city"].as_str().unwrap_or("unknown"); let client = reqwest::Client::new(); let resp = client .get(format!("https://api.weather.com/v1/{city}")) .send() .await .map_err(|e| prompty::InvokerError::ExecutionError(e.to_string()))?; let data: serde_json::Value = resp.json().await .map_err(|e| prompty::InvokerError::ExecutionError(e.to_string()))?; Ok(serde_json::json!(format!( "{}°F, {}", data["temp"], data["condition"] ))) })});

5. Multiple Tools Example
Section titled “5. Multiple Tools Example”You can define as many tools as needed. Here’s a more complete agent with database and search capabilities:
---name: research-agentmodel: id: gpt-4o provider: openai apiType: chat connection: kind: key apiKey: ${env:OPENAI_API_KEY} options: temperature: 0tools: - name: search_docs kind: function description: Search internal documentation parameters: - name: query kind: string description: The search query required: true - name: limit kind: integer description: Max number of results (default 5) - name: get_user kind: function description: Look up a user by email parameters: - name: email kind: string description: The user's email address required: true - name: send_email kind: function description: Send an email to a user parameters: - name: to kind: string description: Recipient email required: true - name: subject kind: string description: Email subject required: true - name: body kind: string description: Email body required: trueinputs: - name: request kind: string---system:You are an office assistant.You can search docs, look up users, and send emails.Always confirm before sending emails.
user:{{request}}from prompty import load, turn, tool, bind_tools
@tooldef search_docs(query: str, limit: int = 5) -> str: """Search internal documentation.""" return f"Found {limit} results for '{query}'"
@tooldef get_user(email: str) -> str: """Look up a user by email.""" return '{"name": "Jane Doe", "email": "jane@example.com", "role": "Engineer"}'
@tooldef send_email(to: str, subject: str, body: str) -> str: """Send an email to a user.""" return f"Email sent to {to}"
agent = load("research-agent.prompty")tools = bind_tools(agent, [search_docs, get_user, send_email])
result = turn( agent, inputs={"request": "Find docs about onboarding and email a summary to jane@example.com"}, tools=tools,)import { load, turn, tool, bindTools } from "@prompty/core";
const searchDocs = tool( (query: string, limit = 5) => `Found ${limit} results for '${query}'`, { name: "search_docs", description: "Search internal documentation", parameters: [ { name: "query", kind: "string", required: true }, { name: "limit", kind: "integer", default: 5 }, ], },);
const getUser = tool( (email: string) => JSON.stringify({ name: "Jane Doe", email, role: "Engineer" }), { name: "get_user", description: "Look up a user by email", parameters: [{ name: "email", kind: "string", required: true }], },);
const sendEmail = tool( (to: string, subject: string, body: string) => `Email sent to ${to}`, { name: "send_email", description: "Send an email to a user", parameters: [ { name: "to", kind: "string", required: true }, { name: "subject", kind: "string", required: true }, { name: "body", kind: "string", required: true }, ], },);
const agent = await load("research-agent.prompty");const tools = bindTools(agent, [searchDocs, getUser, sendEmail]);
const result = await turn(agent, { request: "Find docs about onboarding and email a summary to jane@example.com",}, { tools });using Prompty.Core;
public class ResearchTools{ [Tool(Name = "search_docs", Description = "Search internal documentation")] public string SearchDocs(string query, int limit = 5) { return $"Found {limit} results for '{query}'"; }
[Tool(Name = "get_user", Description = "Look up a user by email")] public string GetUser(string email) { return $$"""{"name": "Jane Doe", "email": "{{email}}", "role": "Engineer"}"""; }
[Tool(Name = "send_email", Description = "Send an email to a user")] public string SendEmail(string to, string subject, string body) { return $"Email sent to {to}"; }}
var agent = PromptyLoader.Load("research-agent.prompty");var service = new ResearchTools();var tools = ToolAttribute.BindTools(agent, service);
var result = await Pipeline.TurnAsync( agent, new() { ["request"] = "Find docs about onboarding and email a summary to jane@example.com" }, tools: tools);use serde_json::json;use prompty::TurnOptions;
prompty::register_tool_handler("search_docs", |args| { Box::pin(async move { let query = args["query"].as_str().unwrap_or(""); let limit = args["limit"].as_i64().unwrap_or(5); Ok(json!(format!("Found {limit} results for '{query}'"))) })});
prompty::register_tool_handler("get_user", |args| { Box::pin(async move { let email = args["email"].as_str().unwrap_or(""); Ok(json!({"name": "Jane Doe", "email": email, "role": "Engineer"})) })});
prompty::register_tool_handler("send_email", |args| { Box::pin(async move { let to = args["to"].as_str().unwrap_or(""); Ok(json!(format!("Email sent to {to}"))) })});
let result = prompty::turn_from_path( "research-agent.prompty", Some(&json!({ "request": "Find docs about onboarding and email a summary to jane@example.com" })), Some(TurnOptions { max_iterations: Some(10), ..Default::default() }),).await?;

6. Error Handling
Section titled “6. Error Handling”from prompty import load, turn, tool, bind_tools
@tooldef get_weather(city: str) -> str: """Get the current weather for a city.""" return f"72°F and sunny in {city}"
agent = load("agent.prompty")
# bind_tools catches mismatches immediately — no waiting for the LLMtry: tools = bind_tools(agent, [get_weather]) # If agent.prompty also declares get_time, bind_tools warns # that it's unbound (but doesn't error — it may be in the registry) result = turn(agent, inputs={"question": "Weather?"}, tools=tools)except ValueError as e: print(f"Binding error: {e}")except Exception as e: print(f"Execution error: {e}")import { load, turn, tool, bindTools } from "@prompty/core";
const getWeather = tool( (city: string) => `72°F and sunny in ${city}`, { name: "get_weather", description: "Get the current weather", parameters: [{ name: "city", kind: "string", required: true }], },);
try { const agent = await load("agent.prompty"); const tools = bindTools(agent, [getWeather]); const result = await turn(agent, { question: "Weather?" }, { tools });} catch (error) { console.error("Agent error:", error);}using Prompty.Core;
public class MyTools{ [Tool(Name = "get_weather", Description = "Get the current weather")] public string GetWeather(string city) => $"72°F and sunny in {city}";}
try{ var agent = PromptyLoader.Load("agent.prompty"); var service = new MyTools(); // BindTools throws if a [Tool] method doesn't match a declaration var tools = ToolAttribute.BindTools(agent, service); var result = await Pipeline.TurnAsync( agent, new() { ["question"] = "What's the weather?" }, tools: tools );}catch (InvalidOperationException ex){ Console.WriteLine($"Binding error: {ex.Message}");}catch (Exception ex){ Console.WriteLine($"Execution error: {ex.Message}");}use serde_json::json;use prompty::TurnOptions;
prompty::register_tool_handler("get_weather", |args| { Box::pin(async move { let city = args["city"].as_str().unwrap_or("unknown"); Ok(json!(format!("72°F and sunny in {city}"))) })});
match prompty::turn_from_path( "agent.prompty", Some(&json!({ "question": "Weather?" })), Some(TurnOptions { max_iterations: Some(10), ..Default::default() }),).await { Ok(result) => println!("{result}"), Err(e) => eprintln!("Agent error: {e}"),}

Common pitfalls:
| Issue | Cause | Fix |
|---|---|---|
| ValueError: Tool 'X' not found | Function not registered | Bind it with bind_tools (or register a tool handler) |
| Agent loops forever | LLM keeps calling tools | Set maxIterations (max_iterations in Python/Rust) or add “respond when done” to the system prompt |
| Wrong arguments passed | Schema mismatch | Ensure parameters in .prompty match your function signature |
| Tool returns non-string | Runtime expects string | Always return a string from tool functions (use json.dumps() for objects) |
Complete Tested Example
Section titled “Complete Tested Example”A full, tested example you can copy and run:
"""Agent with tool calling — register tools and run the agent loop.
This example shows how to define tools and run an agent that calls them.Used in: how-to/agent-tool-calling.mdx"""from __future__ import annotations
from prompty import turn, loadfrom prompty.core import tool
@tooldef get_weather(city: str) -> str: """Get the current weather for a city.""" return f"72°F and sunny in {city}"
agent = load("chat-agent.prompty")result = turn( agent, inputs={"question": "What's the weather in Seattle?"}, tools={"get_weather": get_weather},)print(result)/** * Agent with tool calling — define tools, load agent prompty, invoke. * * Uses `tool()` to wrap functions with metadata and `bindTools()` to * validate handlers against the agent's declared tools. * * @example * ```bash * OPENAI_API_KEY=sk-... npx tsx examples/agent-tool-calling.ts * ``` */import "@prompty/openai";import { tool, bindTools, turn, load } from "@prompty/core";import { resolve } from "node:path";
const promptyFile = resolve(import.meta.dirname, "../../prompts/chat-agent.prompty");
// Define a tool with typed parametersconst getWeather = tool( (args: Record<string, unknown>) => { const city = args.city as string; return JSON.stringify({ city, temperature: 72, conditions: "sunny" }); }, { name: "get_weather", description: "Get the current weather for a city", parameters: [ { name: "city", kind: "string", description: "City name", required: true }, ], },);
export async function agentToolCalling(question?: string): Promise<string> { const agent = load(promptyFile);
// Validate tool handlers against agent's declared tools const tools = bindTools(agent, [getWeather]);
// turn runs the LLM loop: call → tool dispatch → call → ... const result = await turn(agent, { question: question ?? "What's the weather in Seattle?", }, { tools, maxIterations: 5, });
return result as string;}
// Run directlyconst response = await agentToolCalling();console.log(response);// Copyright (c) Microsoft. All rights reserved.
using Prompty.Core;using Prompty.OpenAI;
namespace DocsExamples.Examples;
/// <summary>/// Agent with tool calling — define tools, load agent prompty, invoke with tools./// </summary>public static class AgentToolCalling{ /// <summary> /// Runs an agent loop that can call registered tools until the LLM /// returns a final response or max iterations is reached. /// </summary> public static async Task<object> RunAsync( string promptyPath, Dictionary<string, Func<string, Task<string>>> tools, Dictionary<string, object?>? inputs = null, int maxIterations = 10) { // One-time setup new PromptyBuilder() .AddOpenAI();
// Register built-in tool handlers (function, mcp, etc.) ToolDispatch.RegisterBuiltins();
// Load the agent var agent = PromptyLoader.Load(promptyPath);
// Run the agent loop with tool dispatch var result = await Pipeline.TurnAsync( agent, inputs, tools: tools, maxIterations: maxIterations);
return result; }
/// <summary> /// Runs an agent using the [Tool] attribute for automatic tool discovery. /// </summary> public static async Task<object> RunWithToolAttributeAsync( string promptyPath, object toolInstance, Dictionary<string, object?>? inputs = null, int maxIterations = 10) { // One-time setup new PromptyBuilder() .AddOpenAI(); ToolDispatch.RegisterBuiltins();
// Load and bind tools var agent = PromptyLoader.Load(promptyPath); var tools = ToolAttribute.BindTools(agent, toolInstance);
var result = await Pipeline.TurnAsync( agent, inputs, tools: tools, maxIterations: maxIterations);
return result; }}
/// <summary>/// Example tool class using the [Tool] attribute for auto-discovery./// </summary>public class WeatherTools{ [ToolAttribute(Name = "get_weather", Description = "Get the current weather for a city")] public string GetWeather(string city) { return $"72°F and sunny in {city}"; }}use serde_json::json;use prompty::TurnOptions;
#[tokio::main]async fn main() -> Result<(), Box<dyn std::error::Error>> { prompty::register_defaults(); prompty_openai::register();
// Register tool handlers prompty::register_tool_handler("get_weather", |args| { Box::pin(async move { let city = args["city"].as_str().unwrap_or("unknown"); Ok(json!(format!("72°F and sunny in {city}"))) }) });
// Run the agent loop — tools are called automatically let result = prompty::turn_from_path( "agent.prompty", Some(&json!({ "question": "What's the weather in Seattle?" })), Some(TurnOptions { max_iterations: Some(10), ..Default::default() }), ).await?; println!("{result}"); Ok(())}

Further Reading
Section titled “Further Reading”- Agent Mode Concepts — how the agent loop works internally
- Tools & Tool Registry — custom tool kinds and wire projection
- Chat Assistant Tutorial — end-to-end walkthrough with tool calling
- Troubleshooting — common issues and solutions