Documentation Index
Fetch the complete documentation index at: https://docs.runlayer.com/llms.txt
Use this file to discover all available pages before exploring further.
The OpenAI Agents SDK supports MCP servers with multiple transport options, enabling Python agents to use MCP tools.
Installation
pip install openai-agents
Streamable HTTP (Production)
Use MCPServerStreamableHttp for self-managed MCP servers:
import asyncio

from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp


async def main():
    # Connection settings for a Runlayer-hosted MCP server (Streamable HTTP).
    connection_params = {
        "url": "https://mcp.runlayer.com/github-a1b2c3/mcp",
        "headers": {"x-runlayer-api-key": "your-api-key"},
        "timeout": 10,
    }
    async with MCPServerStreamableHttp(
        name="GitHub Server",
        params=connection_params,
        cache_tools_list=True,  # reuse the tool list across runs
    ) as github_server:
        assistant = Agent(
            name="GitHub Assistant",
            instructions="Use GitHub tools to help with repository tasks.",
            mcp_servers=[github_server],
        )
        run_result = await Runner.run(
            assistant,
            "Get info about the vercel/ai repository",
        )
        print(run_result.final_output)


asyncio.run(main())
stdio Transport (Local Development)
For local Runlayer MCP servers:
import asyncio

from agents import Agent, Runner
from agents.mcp import MCPServerStdio


async def main():
    # stdio transport launches the MCP server as a local subprocess (via uvx)
    # and talks to it over stdin/stdout — no HTTP endpoint required.
    async with MCPServerStdio(
        name="Local Runlayer Server",
        params={
            "command": "uvx",
            "args": [
                "runlayer",
                "936ac3e8-bb75-428d-8db1-b1f08ff07816",
                "--secret", "your-secret-key",
                "--host", "https://mcp.runlayer.com"
            ],
        },
    ) as server:
        agent = Agent(
            name="Assistant",
            instructions="Help with tasks using local MCP server.",
            mcp_servers=[server],
        )
        result = await Runner.run(agent, "Use the available tools")
        print(result.final_output)


asyncio.run(main())
Hosted MCP Tools
Let OpenAI’s infrastructure call MCP servers:
import asyncio

from agents import Agent, Runner, HostedMCPTool


async def main():
    # HostedMCPTool delegates the MCP connection to OpenAI's infrastructure:
    # no local server session is opened, so no async context manager is needed.
    agent = Agent(
        name="Assistant",
        tools=[
            HostedMCPTool(
                tool_config={
                    "type": "mcp",
                    "server_label": "github",
                    "server_url": "https://mcp.runlayer.com/github-a1b2c3/mcp",
                    # Skip the human-approval step for every tool call.
                    "require_approval": "never",
                }
            )
        ],
    )
    result = await Runner.run(agent, "Get repository info")
    print(result.final_output)


asyncio.run(main())
Tool Filtering
Filter which tools to expose:
import asyncio

from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp, create_static_tool_filter


async def main():
    # Only the tools named in the filter are exposed to the agent;
    # every other tool on the server stays hidden.
    async with MCPServerStreamableHttp(
        name="GitHub Server",
        params={"url": "https://mcp.runlayer.com/github-a1b2c3/mcp"},
        tool_filter=create_static_tool_filter(
            allowed_tool_names=["get_repository", "list_issues"]
        ),
    ) as server:
        agent = Agent(name="Assistant", mcp_servers=[server])
        result = await Runner.run(agent, "Get repo info")
        print(result.final_output)


asyncio.run(main())
Multiple MCP Servers
Combine tools from multiple servers:
import asyncio

from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp


async def main():
    # Open both server sessions; the agent sees the union of their tools.
    async with MCPServerStreamableHttp(
        name="GitHub",
        params={
            "url": "https://mcp.runlayer.com/github-a1b2c3/mcp",
            "headers": {"x-runlayer-api-key": "your-api-key"},
        },
    ) as github_server:
        async with MCPServerStreamableHttp(
            name="Linear",
            params={
                "url": "https://mcp.runlayer.com/linear-d4e5f6/mcp",
                "headers": {"x-runlayer-api-key": "your-api-key"},
            },
        ) as linear_server:
            coordinator = Agent(
                name="Project Manager",
                instructions="Help coordinate GitHub and Linear tasks.",
                mcp_servers=[github_server, linear_server],
            )
            run_result = await Runner.run(
                coordinator,
                "Create a GitHub issue and a Linear ticket for the bug",
            )
            print(run_result.final_output)


asyncio.run(main())
Using MCP Prompts
Fetch prompts from MCP servers:
import asyncio

from agents import Agent
from agents.mcp import MCPServerStreamableHttp


async def main():
    async with MCPServerStreamableHttp(
        name="Server",
        params={"url": "https://mcp.runlayer.com/server-xyz/mcp"},
    ) as server:
        # Get available prompts
        prompts = await server.list_prompts()
        # Fetch a specific prompt, supplying its arguments
        prompt_result = await server.get_prompt(
            "generate_instructions",
            {"focus": "code review", "language": "python"}
        )
        # Use the server-rendered prompt text as the agent's instructions.
        instructions = prompt_result.messages[0].content.text
        agent = Agent(
            name="Code Reviewer",
            instructions=instructions,
            mcp_servers=[server],
        )


asyncio.run(main())
Resources