Overview
Model Context Protocol (MCP) is an open protocol, created by Anthropic, that gives large language models standardized tool-calling abilities. RedPill supports MCP through OpenAI-compatible tool calling.
MCP allows AI models to interact with external tools and services like file systems, databases, APIs, and more.
What is MCP?
MCP (Model Context Protocol) provides:
Standardized tool definitions - Define tools once, use across models
Stateful interactions - Maintain context across multiple tool calls
Server-based architecture - Tools run as separate services
Rich tool ecosystem - Access file systems, databases, APIs, and more
Installation
pip install mcp anthropic openai
Basic MCP Server Example
File System MCP Server
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from anthropic import Anthropic
from openai import OpenAI
# Initialize RedPill client
# NOTE(review): "YOUR_REDPILL_API_KEY" is a placeholder — presumably loaded
# from an environment variable or secret store in real use; confirm before
# copying this example into production code.
redpill = OpenAI(
api_key = "YOUR_REDPILL_API_KEY" ,
base_url = "https://api.redpill.ai/v1"
)
async def main():
    """Run one tool-calling round trip against a filesystem MCP server.

    Launches the server over stdio, converts its tool definitions to the
    OpenAI tool-calling format, sends one user message, and executes any
    tool calls the model requests against the MCP session.
    """
    import json  # local import: parse model-generated tool-call arguments

    # Configure MCP server for file system access (launched via npx, stdio transport)
    server_params = StdioServerParameters(
        command="npx",
        args=[
            "-y",
            "@modelcontextprotocol/server-filesystem",
            "/path/to/allowed/directory",
        ],
    )

    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            # Perform the MCP handshake before using the session
            await session.initialize()

            # Discover the tools this server exposes
            tools_result = await session.list_tools()
            print(f"Available tools: {tools_result.tools}")

            # Convert MCP tools to OpenAI format
            openai_tools = convert_mcp_to_openai_tools(tools_result.tools)

            # Ask the model; it may answer with tool calls instead of text
            response = redpill.chat.completions.create(
                model="anthropic/claude-3.5-sonnet",
                messages=[
                    {
                        "role": "user",
                        "content": "List files in the directory",
                    }
                ],
                tools=openai_tools,
            )

            # Execute any requested tool calls against the MCP session
            if response.choices[0].message.tool_calls:
                for tool_call in response.choices[0].message.tool_calls:
                    # SECURITY: tool-call arguments are untrusted model output
                    # in JSON form — parse with json.loads, never eval().
                    result = await session.call_tool(
                        tool_call.function.name,
                        json.loads(tool_call.function.arguments),
                    )
                    print(f"Tool result: {result}")
def convert_mcp_to_openai_tools(mcp_tools):
    """Translate MCP tool definitions into OpenAI function-calling specs."""
    return [
        {
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                "parameters": tool.inputSchema,
            },
        }
        for tool in mcp_tools
    ]
# Entry point: drive the async example when executed as a script.
if __name__ == "__main__" :
asyncio.run(main())
Complete MCP Client Example
import asyncio
import os
from typing import Optional
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from openai import OpenAI
class MCPClient:
    """Bridge between one MCP server and RedPill's OpenAI-compatible API.

    Typical usage:
        client = MCPClient(redpill_api_key="...")
        await client.connect_to_server("npx", "-y", "<server-package>")
        answer = await client.chat("...")
        await client.aclose()
    """

    def __init__(self, redpill_api_key: str):
        self.client = OpenAI(
            api_key=redpill_api_key,
            base_url="https://api.redpill.ai/v1",
        )
        self.session: Optional[ClientSession] = None
        # Tool specs in OpenAI format; empty until connect_to_server() runs,
        # so chat() before connecting fails on the API call, not AttributeError.
        self.tools = []
        # Owns the transport/session context managers; closed by aclose().
        self._exit_stack = None

    def convert_tool_format(self, mcp_tool):
        """Convert one MCP tool definition to the OpenAI function-calling format."""
        return {
            "type": "function",
            "function": {
                "name": mcp_tool.name,
                "description": mcp_tool.description,
                "parameters": mcp_tool.inputSchema,
            },
        }

    async def connect_to_server(self, server_command: str, *args):
        """Launch an MCP server and open a session to it over stdio.

        BUG FIX: the original called __aenter__() on a temporary
        stdio_client(...) object that was immediately discarded, so the
        context managers were never exited (and could be garbage-collected
        mid-use). An AsyncExitStack keeps them alive for the client's
        lifetime; call aclose() to shut everything down.
        """
        from contextlib import AsyncExitStack

        server_params = StdioServerParameters(
            command=server_command,
            args=list(args),
        )
        self._exit_stack = AsyncExitStack()
        self.read, self.write = await self._exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.session = await self._exit_stack.enter_async_context(
            ClientSession(self.read, self.write)
        )
        await self.session.initialize()

        # Cache the server's tools in OpenAI format for chat()
        tools_result = await self.session.list_tools()
        self.tools = [
            self.convert_tool_format(tool)
            for tool in tools_result.tools
        ]
        print(f"Connected to MCP server. Available tools: {len(self.tools)}")

    async def aclose(self):
        """Close the MCP session and server transport (new, optional helper)."""
        if self._exit_stack is not None:
            await self._exit_stack.aclose()
            self._exit_stack = None
            self.session = None

    async def chat(self, user_message: str, model: str = "anthropic/claude-3.5-sonnet"):
        """Send a message and loop until the model stops requesting tools.

        Each tool call is executed against the MCP session and its result is
        appended as a "tool" message; returns the model's final text answer.
        """
        import json  # parse model-generated tool arguments safely

        messages = [{"role": "user", "content": user_message}]
        while True:
            response = self.client.chat.completions.create(
                model=model,
                messages=messages,
                tools=self.tools,
            )
            message = response.choices[0].message
            messages.append({
                "role": "assistant",
                "content": message.content,
                "tool_calls": message.tool_calls if message.tool_calls else None,
            })

            # No tool calls means the model produced its final answer
            if not message.tool_calls:
                return message.content

            # Execute tool calls
            for tool_call in message.tool_calls:
                tool_name = tool_call.function.name
                # SECURITY: arguments are untrusted model output in JSON form —
                # parse with json.loads, never eval().
                tool_args = json.loads(tool_call.function.arguments)
                print(f"Calling tool: {tool_name} with args: {tool_args}")

                # Call MCP server tool
                result = await self.session.call_tool(tool_name, tool_args)

                # Feed the tool result back so the model can continue
                messages.append({
                    "role": "tool",
                    "tool_call_id": tool_call.id,
                    "content": str(result.content),
                })
async def main():
    """Demo: connect to a filesystem MCP server and ask about its contents."""
    mcp = MCPClient(redpill_api_key="YOUR_REDPILL_API_KEY")

    # Expose one documents directory through the filesystem server.
    server_args = (
        "npx",
        "-y",
        "@modelcontextprotocol/server-filesystem",
        "/Users/yourname/Documents",
    )
    await mcp.connect_to_server(*server_args)

    # Single natural-language request; tool calls are resolved inside chat().
    answer = await mcp.chat(
        "What files are in this directory? Show me the 5 most recent."
    )
    print(f"\nAI Response: {answer}")


if __name__ == "__main__":
    asyncio.run(main())
Popular MCP Servers
1. File System Server
Access local file systems:
npx -y @modelcontextprotocol/server-filesystem /path/to/directory
Available tools:
read_file - Read file contents
write_file - Write to files
list_directory - List directory contents
search_files - Search for files
get_file_info - Get file metadata
2. GitHub Server
Interact with GitHub repositories:
npx -y @modelcontextprotocol/server-github
Available tools:
create_issue - Create GitHub issues
create_pull_request - Create PRs
search_repositories - Search repos
get_file_contents - Read files from repos
3. PostgreSQL Server
Query databases:
npx -y @modelcontextprotocol/server-postgres postgres://connection-string
Available tools:
query - Execute SQL queries
list_tables - List database tables
describe_table - Get table schema
4. Web Search Server
Perform web searches:
npx -y @modelcontextprotocol/server-brave-search
Available tools:
brave_search - Search the web
get_page_contents - Fetch webpage content
Use Case: Code Analysis Assistant
import asyncio
from openai import OpenAI
# Shared RedPill client used by the use-case examples below.
# NOTE(review): "YOUR_REDPILL_API_KEY" is a placeholder — substitute a real
# key (e.g. from an environment variable) before running.
client = OpenAI(
api_key = "YOUR_REDPILL_API_KEY" ,
base_url = "https://api.redpill.ai/v1"
)
async def analyze_codebase():
    """Point the model at a local project directory and ask for an analysis."""
    # BUG FIX: MCPClient expects an API key string (redpill_api_key: str),
    # not an OpenAI client object — reuse the key from the configured client.
    mcp_client = MCPClient(redpill_api_key=client.api_key)

    # Connect to file system
    await mcp_client.connect_to_server(
        "npx", "-y",
        "@modelcontextprotocol/server-filesystem",
        "/path/to/your/project",
    )

    # Multi-step request; the chat loop resolves each tool call in turn
    response = await mcp_client.chat(
        """Analyze this codebase:
1. List all Python files
2. Find files containing 'TODO' comments
3. Identify the main entry point
4. Suggest code improvements
""",
        model="anthropic/claude-3.5-sonnet",
    )
    print(response)


asyncio.run(analyze_codebase())
Use Case: GitHub PR Assistant
async def create_pr_from_description():
    """Have the model open a GitHub pull request via the GitHub MCP server."""
    # BUG FIX: MCPClient expects an API key string (redpill_api_key: str),
    # not an OpenAI client object — reuse the key from the configured client.
    mcp_client = MCPClient(redpill_api_key=client.api_key)

    # Connect to GitHub server
    await mcp_client.connect_to_server(
        "npx", "-y",
        "@modelcontextprotocol/server-github",
    )

    # Create PR with AI assistance
    response = await mcp_client.chat(
        """Create a pull request:
- Repository: username/repo
- Title: Add user authentication
- Description: Implement JWT-based auth with Redis sessions
- Base branch: main
- Head branch: feature/auth
""",
        model="openai/gpt-4o",
    )
    print(response)
Use Case: Database Query Assistant
async def query_database():
    """Answer a natural-language question via the PostgreSQL MCP server."""
    # BUG FIX: MCPClient expects an API key string (redpill_api_key: str),
    # not an OpenAI client object — reuse the key from the configured client.
    mcp_client = MCPClient(redpill_api_key=client.api_key)

    # Connect to PostgreSQL
    await mcp_client.connect_to_server(
        "npx", "-y",
        "@modelcontextprotocol/server-postgres",
        "postgres://user:pass@localhost/db",
    )

    # Natural language database queries
    response = await mcp_client.chat(
        "Show me the top 10 users by revenue in the last 30 days",
        model="deepseek/deepseek-chat",  # Good for SQL
    )
    print(response)
Security Considerations
Important Security Notes:
MCP servers can access sensitive resources (files, databases, APIs)
Only connect to trusted MCP servers
Use file system access restrictions
Validate all tool inputs
Monitor tool execution logs
Use read-only database connections when possible
# Restrict file system access to specific directories
safe_directories = [
    "/Users/yourname/Documents/safe-folder",
    "/tmp/mcp-workspace",
]


async def connect_to_safe_directories(mcp_client):
    """Connect the client to each allow-listed directory.

    BUG FIX: the original used `await` in a loop at module level, which is a
    SyntaxError in a plain script — the calls must live inside a coroutine.
    NOTE(review): each connect_to_server() call replaces the previous MCP
    session, so only the last directory remains connected — confirm whether
    one client per directory is intended.
    """
    for directory in safe_directories:
        await mcp_client.connect_to_server(
            "npx", "-y",
            "@modelcontextprotocol/server-filesystem",
            directory,
        )
Supported Models
All RedPill models support tool calling with MCP:
Model | Best For
anthropic/claude-3.5-sonnet | Complex reasoning, code analysis
openai/gpt-4o | General tool use
deepseek/deepseek-chat | SQL queries, coding tasks
phala/qwen-2.5-7b-instruct | Confidential data processing
Benefits of MCP
Standardization Use the same tool definitions across different AI models
Flexibility Easily swap between different MCP server implementations
Rich Ecosystem Access growing library of MCP servers and tools
Privacy Tools run locally or in TEE environments
Resources
Next Steps