Installation
pip install openai-agents
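To confirm the install exposes the MCP integration used throughout this page, a quick import check is enough (just a sanity check, not part of the SDK setup):

# Sanity check: these are the imports used in the examples below.
from agents import Agent, Runner
from agents.mcp import MCPServerStdio, MCPServerStreamableHttp

print("openai-agents MCP support is available")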
Streamable HTTP (Production)
Use MCPServerStreamableHttp for self-managed MCP servers:
import asyncio
from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp

async def main():
    async with MCPServerStreamableHttp(
        name="GitHub Server",
        params={
            "url": "https://mcp.runlayer.com/github-a1b2c3/mcp",
            "headers": {"x-runlayer-api-key": "your-api-key"},
            "timeout": 10,
        },
        cache_tools_list=True,
    ) as server:
        agent = Agent(
            name="GitHub Assistant",
            instructions="Use GitHub tools to help with repository tasks.",
            mcp_servers=[server],
        )
        result = await Runner.run(
            agent,
            "Get info about the vercel/ai repository"
        )
        print(result.final_output)

asyncio.run(main())
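Note that cache_tools_list=True caches the server's tool list for the lifetime of the connection. If the remote tool set can change mid-session, the server object exposes invalidate_tools_cache() to force a refresh on the next run (verify the method against your SDK version). For example, inside the async with block above:

        # Drop the cached tool list so the next Runner.run fetches a fresh one.
        server.invalidate_tools_cache()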
stdio Transport (Local Development)
For local Runlayer MCP servers, use MCPServerStdio:
from agents import Agent, Runner
from agents.mcp import MCPServerStdio

async with MCPServerStdio(
    name="Local Runlayer Server",
    params={
        "command": "runlayer",
        "args": [
            "936ac3e8-bb75-428d-8db1-b1f08ff07816",
            "--secret", "your-secret-key",
            "--host", "https://mcp.runlayer.com",
        ],
    },
) as server:
    agent = Agent(
        name="Assistant",
        instructions="Help with tasks using the local MCP server.",
        mcp_servers=[server],
    )
    result = await Runner.run(agent, "Use the available tools")
    print(result.final_output)
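If you would rather keep the secret off the command line, the stdio params also accept an env mapping passed to the spawned process (an assumption worth verifying against your SDK version; RUNLAYER_SECRET below is a placeholder name, check what the runlayer CLI actually reads):

import os

# Sketch: forward the secret through the subprocess environment instead of a
# --secret flag. The "env" key is assumed to be accepted by MCPServerStdio's
# params; RUNLAYER_SECRET is a placeholder, not a documented runlayer setting.
server = MCPServerStdio(
    name="Local Runlayer Server",
    params={
        "command": "runlayer",
        "args": [
            "936ac3e8-bb75-428d-8db1-b1f08ff07816",
            "--host", "https://mcp.runlayer.com",
        ],
        "env": {"RUNLAYER_SECRET": os.environ["RUNLAYER_SECRET"]},
    },
)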
Hosted MCP Tools
Use HostedMCPTool to have OpenAI’s infrastructure call MCP servers directly:
from agents import Agent, Runner, HostedMCPTool

agent = Agent(
    name="Assistant",
    tools=[
        HostedMCPTool(
            tool_config={
                "type": "mcp",
                "server_label": "github",
                "server_url": "https://mcp.runlayer.com/github-a1b2c3/mcp",
                "require_approval": "never",
            }
        )
    ],
)

result = await Runner.run(agent, "Get repository info")
print(result.final_output)
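The snippet above awaits Runner.run, so it assumes an existing event loop. Outside of one, the SDK also provides Runner.run_sync as a blocking convenience wrapper (to the best of our knowledge of the current SDK; check your version):

# Blocking variant for scripts that are not already running an event loop.
result = Runner.run_sync(agent, "Get repository info")
print(result.final_output)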
Tool Filtering
Filter which of the server's tools are exposed to the agent:
from agents.mcp import MCPServerStreamableHttp, create_static_tool_filter

async with MCPServerStreamableHttp(
    name="GitHub Server",
    params={"url": "https://mcp.runlayer.com/github-a1b2c3/mcp"},
    tool_filter=create_static_tool_filter(
        allowed_tool_names=["get_repository", "list_issues"]
    ),
) as server:
    agent = Agent(name="Assistant", mcp_servers=[server])
    result = await Runner.run(agent, "Get repo info")
    print(result.final_output)
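Beyond the static allow-list, tool_filter also accepts a callable for dynamic filtering. The sketch below assumes the callback receives a filter context plus the MCP tool and returns True to keep it; confirm the exact signature in the SDK's MCP tool-filtering docs:

def read_only_tools(context, tool) -> bool:
    # Keep only tools whose names look read-only; everything else is hidden
    # from the agent. Assumes the tool object exposes a .name attribute.
    return tool.name.startswith(("get_", "list_"))

async with MCPServerStreamableHttp(
    name="GitHub Server",
    params={"url": "https://mcp.runlayer.com/github-a1b2c3/mcp"},
    tool_filter=read_only_tools,
) as server:
    agent = Agent(name="Assistant", mcp_servers=[server])
    result = await Runner.run(agent, "Get repo info")
    print(result.final_output)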
Multiple MCP Servers
Combine tools from multiple servers:
import asyncio
from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp

async def main():
    async with MCPServerStreamableHttp(
        name="GitHub",
        params={
            "url": "https://mcp.runlayer.com/github-a1b2c3/mcp",
            "headers": {"Authorization": "Bearer github-token"},
        },
    ) as github_server, MCPServerStreamableHttp(
        name="Linear",
        params={
            "url": "https://mcp.runlayer.com/linear-d4e5f6/mcp",
            "headers": {"Authorization": "Bearer linear-token"},
        },
    ) as linear_server:
        agent = Agent(
            name="Project Manager",
            instructions="Help coordinate GitHub and Linear tasks.",
            mcp_servers=[github_server, linear_server],
        )
        result = await Runner.run(
            agent,
            "Create a GitHub issue and a Linear ticket for the bug"
        )
        print(result.final_output)

asyncio.run(main())
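Nesting async with clauses gets unwieldy past two or three servers. The standard-library contextlib.AsyncExitStack keeps any number of connections managed in one place; a minimal sketch reusing the same example URLs (auth headers omitted for brevity):

import asyncio
from contextlib import AsyncExitStack

from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp

SERVER_PARAMS = {
    "GitHub": {"url": "https://mcp.runlayer.com/github-a1b2c3/mcp"},
    "Linear": {"url": "https://mcp.runlayer.com/linear-d4e5f6/mcp"},
}

async def main():
    async with AsyncExitStack() as stack:
        # Enter each server's async context and collect the connected instances.
        servers = [
            await stack.enter_async_context(
                MCPServerStreamableHttp(name=name, params=params)
            )
            for name, params in SERVER_PARAMS.items()
        ]
        agent = Agent(
            name="Project Manager",
            instructions="Help coordinate GitHub and Linear tasks.",
            mcp_servers=servers,
        )
        result = await Runner.run(agent, "Summarize open work across both tools")
        print(result.final_output)

asyncio.run(main())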
Using MCP Prompts
Fetch prompts from MCP servers:
async with MCPServerStreamableHttp(
    name="Server",
    params={"url": "https://mcp.runlayer.com/server-xyz/mcp"},
) as server:
    # Get available prompts
    prompts = await server.list_prompts()

    # Fetch a specific prompt
    prompt_result = await server.get_prompt(
        "generate_instructions",
        {"focus": "code review", "language": "python"}
    )
    instructions = prompt_result.messages[0].content.text

    agent = Agent(
        name="Code Reviewer",
        instructions=instructions,
        mcp_servers=[server],
    )
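If you are unsure which prompts a server exposes, the list_prompts result carries the metadata. The sketch below assumes the standard MCP ListPromptsResult shape, where each entry has a name and description:

    # Inside the async with block: inspect the available prompts before fetching one.
    prompts = await server.list_prompts()
    for prompt in prompts.prompts:
        print(f"{prompt.name}: {prompt.description}")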