Structured Output
What You’ll Build
Section titled “What You’ll Build”
A prompt that returns structured JSON matching a schema you define — no manual parsing, no malformed output. The LLM is constrained to return exactly the fields you specify, and the Prompty processor auto-parses the result.
Step 1: Define the Output Schema
Section titled “Step 1: Define the Output Schema”
Add an outputs block to your .prompty frontmatter. Each property
needs a name, kind (type), and optional description:
---name: weather-reportdescription: Returns structured weather data for a citymodel: id: gpt-4o-mini provider: openai apiType: chat connection: kind: key apiKey: ${env:OPENAI_API_KEY} options: temperature: 0.3inputs: - name: city kind: string default: Seattleoutputs: - name: city kind: string description: The city name - name: temperature kind: integer description: Temperature in degrees Fahrenheit - name: conditions kind: string description: Current weather conditions (e.g. sunny, cloudy, rain)---system:You are a weather data API. Return the current weather for the requested city.
user:What's the weather in {{city}}?
Prompty converts outputs into OpenAI’s response_format with
type: "json_schema" and strict mode enabled — the model must return
valid JSON with exactly those fields.
Step 2: Execute and Use the Result
Section titled “Step 2: Execute and Use the Result”
from prompty import invoke
result = invoke("weather.prompty", inputs={"city": "Seattle"})
# result is a StructuredResult (dict subclass) — no json.loads neededprint(result["city"]) # "Seattle"print(result["temperature"]) # 62print(result["conditions"]) # "Partly cloudy"print(type(result)) # <class 'StructuredResult'>
The async variant works identically:
from prompty import invoke_async
result = await invoke_async("weather.prompty", inputs={"city": "Seattle"})print(result["temperature"]) # 62import { invoke } from "@prompty/core";import "@prompty/openai"; // registers provider
const result = await invoke("weather.prompty", { city: "Seattle" });
// result is a StructuredResult — use like a normal objectconsole.log(result.city); // "Seattle"console.log(result.temperature); // 62console.log(result.conditions); // "Partly cloudy"using Prompty.Core;
var result = await Pipeline.InvokeAsync( "weather.prompty", new() { ["city"] = "Seattle" });
// When outputs is defined, result is a StructuredResult (Dictionary subclass)if (result is StructuredResult sr){ Console.WriteLine(sr["city"]); // "Seattle" Console.WriteLine(sr["temperature"]); // 62 Console.WriteLine(sr["conditions"]); // "Partly cloudy"}use serde_json::json;
#[tokio::main]async fn main() -> Result<(), Box<dyn std::error::Error>> { prompty::register_defaults(); prompty_openai::register();
let result = prompty::invoke_from_path( "weather.prompty", Some(&json!({ "city": "Seattle" })), ).await?;
// result is a parsed serde_json::Value matching the outputs schema println!("{}", result["city"]); // "Seattle" println!("{}", result["temperature"]); // 62 println!("{}", result["conditions"]); // "Partly cloudy" Ok(())}
Nested Objects
Section titled “Nested Objects”
For complex responses, use kind: object with nested properties:
---name: detailed-weathermodel: id: gpt-4o-mini provider: openai apiType: chat connection: kind: key apiKey: ${env:OPENAI_API_KEY}outputs: - name: city kind: string - name: current kind: object properties: - name: temperature kind: integer description: Temperature in °F - name: humidity kind: integer description: Humidity percentage - name: conditions kind: string - name: forecast kind: array description: Next 3 days forecast---system:Return current weather and a 3-day forecast for the requested city.
user:Weather for {{city}}?from prompty import invoke
result = invoke("detailed-weather.prompty", inputs={"city": "Portland"})
print(result["city"]) # "Portland"print(result["current"]["temperature"]) # 58print(result["current"]["humidity"]) # 72print(result["forecast"]) # [{"day": "Mon", ...}, ...]import { invoke } from "@prompty/core";import "@prompty/openai"; // registers provider
const result = await invoke("detailed-weather.prompty", { city: "Portland" });
console.log(result.city); // "Portland"console.log(result.current.temperature); // 58console.log(result.current.humidity); // 72console.log(result.forecast); // [{day: "Mon", ...}, ...]using Prompty.Core;
var result = await Pipeline.InvokeAsync( "detailed-weather.prompty", new() { ["city"] = "Portland" });
if (result is StructuredResult sr){ Console.WriteLine(sr["city"]); // "Portland" var current = (Dictionary<string, object?>)sr["current"]!; Console.WriteLine(current["temperature"]); // 58 Console.WriteLine(current["humidity"]); // 72 Console.WriteLine(sr["forecast"]); // [{...}, ...]}use serde_json::json;
let result = prompty::invoke_from_path( "detailed-weather.prompty", Some(&json!({ "city": "Portland" })),).await?;
println!("{}", result["city"]); // "Portland"println!("{}", result["current"]["temperature"]); // 58println!("{}", result["current"]["humidity"]); // 72println!("{}", result["forecast"]); // [{"day": "Mon", ...}, ...]
Step 3: Cast to Typed Objects (Optional)
Section titled “Step 3: Cast to Typed Objects (Optional)”
When you need a typed object instead of a dictionary, use cast(). It
deserializes directly from the raw JSON — no dict→JSON→T round-trip.
from dataclasses import dataclassfrom prompty import invoke, cast
@dataclassclass WeatherReport: city: str temperature: int conditions: str
result = invoke("weather.prompty", inputs={"city": "Seattle"})report = cast(result, WeatherReport)print(report.city) # "Seattle"print(type(report)) # <class 'WeatherReport'>
# Or as a one-liner with target_type:report = invoke("weather.prompty", inputs={"city": "Seattle"}, target_type=WeatherReport)Works with Pydantic too (uses model_validate_json for optimal performance):
from pydantic import BaseModel
class WeatherReport(BaseModel): city: str temperature: int conditions: str
report = invoke("weather.prompty", inputs={"city": "Seattle"}, target_type=WeatherReport)import { invoke, cast } from "@prompty/core";import { z } from "zod";
const WeatherSchema = z.object({ city: z.string(), temperature: z.number(), conditions: z.string(),});
// Option 1: Cast after invokeconst result = await invoke("weather.prompty", { city: "Seattle" });const report = cast(result, WeatherSchema.parse);
// Option 2: Typed invoke with validatorconst report2 = await invoke( "weather.prompty", { city: "Seattle" }, { validator: WeatherSchema.parse });using Prompty.Core;
public record WeatherReport(string City, int Temperature, string Conditions);
// Option 1: Cast after invokevar result = await Pipeline.InvokeAsync("weather.prompty", new() { ["city"] = "Seattle" });var report = ((StructuredResult)result).Cast<WeatherReport>();
// Option 2: Generic invokevar report2 = await Pipeline.InvokeAsync<WeatherReport>("weather.prompty", new() { ["city"] = "Seattle" });use serde::Deserialize;use serde_json::json;
#[derive(Deserialize, Debug)]struct WeatherReport { city: String, temperature: i32, conditions: String,}
let result = prompty::invoke_from_path( "weather.prompty", Some(&json!({ "city": "Seattle" })),).await?;
// Deserialize the structured JSON into a typed struct
let report: WeatherReport = serde_json::from_value(result)?;println!("{}", report.city); // "Seattle"println!("{:?}", report); // WeatherReport { city: "Seattle", ... }
Provider Support
Section titled “Provider Support”
The runtime generates this wire format automatically:
{ "type": "json_schema", "json_schema": { "name": "output_schema", "strict": true, "schema": { "type": "object", "properties": { "city": { "type": "string" }, "temperature": { "type": "integer" }, "conditions": { "type": "string" } }, "required": ["city", "temperature", "conditions"], "additionalProperties": false } }}All properties are marked required and additionalProperties is set to
false — the model returns exactly the fields you specified, nothing more.
Complete Tested Example
Section titled “Complete Tested Example”
A full, tested example you can copy and run:
"""Structured output with JSON schema.
This example uses outputs schema to get structured JSON from the LLM.Used in: how-to/structured-output.mdx"""from __future__ import annotations
from prompty import invoke, load
agent = load("structured-output.prompty")result = invoke(agent, inputs={"city": "Seattle"})print(f"City: {result['city']}")print(f"Temperature: {result['temperature']}°F")print(f"Conditions: {result['conditions']}")/** * Structured output — LLM returns JSON matching the output schema. * * When a .prompty file defines `outputs:`, the processor automatically * parses the LLM response as JSON. * * @example * ```bash * OPENAI_API_KEY=sk-... npx tsx examples/structured-output.ts * ``` */import "@prompty/openai";import { invoke } from "@prompty/core";import { resolve } from "node:path";
const promptyFile = resolve(import.meta.dirname, "../../prompts/structured-output.prompty");
interface WeatherResult { city: string; temperature: number; conditions: string;}
export async function structuredOutput(city?: string): Promise<WeatherResult> { const result = await invoke(promptyFile, { city: city ?? "Seattle", }); return result as WeatherResult;}
// Run directlyconst weather = await structuredOutput();console.log("Structured response:", JSON.stringify(weather, null, 2));console.log(`City: ${weather.city}`);console.log(`Temperature: ${weather.temperature}°F`);console.log(`Conditions: ${weather.conditions}`);// Copyright (c) Microsoft. All rights reserved.
using Prompty.Core;using Prompty.OpenAI;
namespace DocsExamples.Examples;
/// <summary>/// Structured output — the prompty's outputs schema is converted to/// an OpenAI response_format, ensuring the LLM returns valid JSON./// </summary>public static class StructuredOutput{ /// <summary> /// Load a .prompty with an outputs schema and invoke it. /// The executor converts the outputs schema to response_format automatically. /// </summary> public static async Task<object> RunAsync( string promptyPath, Dictionary<string, object?>? inputs = null) { // One-time setup new PromptyBuilder() .AddOpenAI();
var result = await Pipeline.InvokeAsync(promptyPath, inputs); return result; }}use serde::Deserialize;use serde_json::json;
#[derive(Deserialize, Debug)]struct WeatherReport { city: String, temperature: i32, conditions: String,}
#[tokio::main]async fn main() -> Result<(), Box<dyn std::error::Error>> { prompty::register_defaults(); prompty_openai::register();
let result = prompty::invoke_from_path( "weather.prompty", Some(&json!({ "city": "Seattle" })), ).await?;
let report: WeatherReport = serde_json::from_value(result)?; println!("{report:?}"); Ok(())}
Further Reading
Section titled “Further Reading”
- Structured Output concept — how the conversion pipeline works under the hood
- Output Schema reference — full outputs syntax