Python MCP server
Create a Model Context Protocol server using Python and FastMCP with Apify Actor integration for pay-per-event monetization.
src/main.py
1"""Main entry point for the MCP Server Actor."""2
3import asyncio4import os5import time6from collections.abc import Mapping, MutableMapping7from typing import Any8
9import uvicorn10from apify import Actor11from fastmcp import FastMCP12from starlette.requests import Request13from starlette.types import Receive, Scope, Send14
15
def get_server() -> FastMCP:
    """Build and return the FastMCP server, registering its tools and resources."""
    mcp = FastMCP('python-mcp-empty', '1.0.0')

    @mcp.tool()
    def add(a: float, b: float) -> dict:
        """Add two numbers together and return the sum."""
        result = a + b
        # Return both a human-readable message and machine-readable structured content.
        payload = {
            'result': result,
            'operands': {'a': a, 'b': b},
            'operation': 'addition',
        }
        return {
            'type': 'text',
            'text': f'The sum of {a} and {b} is {result}',
            'structuredContent': payload,
        }

    @mcp.resource(uri='https://example.com/calculator', name='calculator-info')
    def calculator_info() -> str:
        """Get information about the calculator service."""
        return 'This is a simple calculator MCP server that can add two numbers together.'

    return mcp
41
42def get_session_id(headers: Mapping[str, str]) -> str | None:43 """Extract session ID from request headers."""44 for key in ('mcp-session-id', 'mcp_session_id'):45 if value := headers.get(key):46 return value47 return None48
49
class SessionTrackingMiddleware:
    """ASGI middleware that tracks MCP sessions and closes idle ones.

    Every request to the MCP endpoint refreshes the session's last-activity
    timestamp and (re)arms a one-shot idle timer. When the timer fires
    without intervening activity, the middleware re-enters itself with an
    internal DELETE request so the wrapped app tears the session down.
    """

    def __init__(self, app: Any, port: int, timeout_secs: int) -> None:
        """Wrap *app*.

        Args:
            app: The downstream ASGI application (the MCP HTTP app).
            port: Local server port, used to build internal DELETE requests.
            timeout_secs: Seconds of inactivity after which a session is closed.
        """
        self.app = app
        self.port = port
        self.timeout_secs = timeout_secs

        # Session tracking state: per-session last-activity timestamps
        # (monotonic clock) and the per-session idle-close timer tasks.
        self._last_activity: dict[str, float] = {}
        self._timers: dict[str, asyncio.Task[None]] = {}

    def _session_cleanup(self, sid: str) -> None:
        """Forget *sid* and cancel its pending idle timer, if any."""
        self._last_activity.pop(sid, None)
        if (timer := self._timers.pop(sid, None)) and not timer.done():
            timer.cancel()

    def _touch(self, sid: str) -> None:
        """Record activity for *sid* and restart its idle-close timer."""
        # Use a monotonic clock so the idle timeout is immune to wall-clock
        # jumps (NTP corrections, manual clock changes). time.time() could
        # close sessions early or keep them alive indefinitely after a jump.
        self._last_activity[sid] = time.monotonic()

        # Cancel the existing timer; the new one below supersedes it.
        if (timer := self._timers.get(sid)) and not timer.done():
            timer.cancel()

        async def close_if_idle() -> None:
            try:
                await asyncio.sleep(self.timeout_secs)

                # Check if activity occurred during the sleep. The 0.9 factor
                # gives a small tolerance against timer/scheduling skew.
                last = self._last_activity.get(sid)
                if last is not None and time.monotonic() - last < self.timeout_secs * 0.9:
                    return

                Actor.log.info(f'Closing idle session: {sid}')

                # Send an internal DELETE request to close the session.
                scope: Scope = {
                    'type': 'http',
                    'http_version': '1.1',
                    'method': 'DELETE',
                    'scheme': 'http',
                    'path': '/mcp',
                    'raw_path': b'/mcp',
                    'query_string': b'',
                    'headers': [(b'mcp-session-id', sid.encode())],
                    'server': ('127.0.0.1', self.port),
                    'client': ('127.0.0.1', 0),
                    # Marker so __call__ skips activity tracking for this request.
                    '_idle_close': True,
                }

                async def noop_receive() -> MutableMapping[str, Any]:
                    return {'type': 'http.request', 'body': b'', 'more_body': False}

                async def noop_send(_: MutableMapping[str, Any]) -> None:
                    pass

                # Re-enter the middleware with the internal DELETE; _idle_close
                # makes __call__ pass it straight through without tracking.
                await self(scope, noop_receive, noop_send)
                self._session_cleanup(sid)

            except asyncio.CancelledError:
                # Timer was superseded by new activity or explicit cleanup.
                pass
            except Exception as e:
                Actor.log.exception(f'Failed to close idle session {sid}: {e}')

        self._timers[sid] = asyncio.create_task(close_if_idle())

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        """ASGI entry point that wraps the underlying app."""
        # Pass through anything that is not an HTTP request to the MCP endpoint.
        path = scope.get('path', '')
        if scope.get('type') != 'http' or path not in ('/mcp', '/mcp/'):
            await self.app(scope, receive, send)
            return

        # Skip tracking for our own internal idle-close requests.
        if scope.get('_idle_close'):
            await self.app(scope, receive, send)
            return

        request = Request(scope, receive)
        sid = get_session_id(request.headers)
        is_delete = scope.get('method') == 'DELETE'

        # Track activity for existing sessions (DELETE means the session is
        # ending, so it must not re-arm the timer).
        if sid and not is_delete:
            self._touch(sid)

        # Capture a newly assigned session ID from the response headers.
        new_sid: str | None = None

        async def capture_send(msg: MutableMapping[str, Any]) -> None:
            nonlocal new_sid
            if msg.get('type') == 'http.response.start':
                for k, v in msg.get('headers', []):
                    if k.decode().lower() == 'mcp-session-id':
                        new_sid = v.decode()
                        break
            await send(msg)

        await self.app(scope, receive, capture_send)

        # Start tracking a session the app just created.
        if not sid and new_sid:
            Actor.log.info(f'New session: {new_sid}')
            self._touch(new_sid)

        # Drop all tracking state on an explicit client DELETE.
        if is_delete and sid:
            Actor.log.info(f'Session closed: {sid}')
            self._session_cleanup(sid)
162
async def main() -> None:
    """Run the MCP Server Actor with session timeout handling."""
    await Actor.init()

    # Runtime configuration comes from the environment, with local-dev defaults.
    port = int(os.environ.get('APIFY_CONTAINER_PORT', '3000'))
    timeout_secs = int(os.environ.get('SESSION_TIMEOUT_SECS', '300'))

    # Build the Streamable HTTP app and wrap it with the session-tracking
    # middleware so idle sessions are closed automatically.
    app = SessionTrackingMiddleware(
        app=get_server().http_app(transport='streamable-http'),
        port=port,
        timeout_secs=timeout_secs,
    )

    try:
        Actor.log.info(f'Starting MCP server on port {port} (session timeout: {timeout_secs}s)')
        config = uvicorn.Config(app, host='0.0.0.0', port=port, log_level='info')  # noqa: S104
        await uvicorn.Server(config).serve()
    except KeyboardInterrupt:
        Actor.log.info('Shutting down...')
    except Exception as e:
        Actor.log.error(f'Server failed: {e}')
        raise
186
if __name__ == '__main__':
    asyncio.run(main())

A template for creating a Model Context Protocol server using FastMCP on the Apify platform.
This template includes a simple example MCP server with:
- An `add` tool that adds two numbers together with structured output
- A dummy `calculator-info` resource endpoint
- Pay Per Event monetization support
- Modify the server: edit `src/main.py` to add your own tools and resources
- Add new tools: use the `@server.tool()` decorator to register new tools
- Add new resources: use the `@server.resource()` decorator to register new resources
- Update billing: configure billing events in `.actor/pay_per_event.json` and charge for tool calls
The server runs on port 3000 (or APIFY_CONTAINER_PORT if set) and exposes the MCP protocol at the /mcp endpoint.
pip install -r requirements.txt
APIFY_META_ORIGIN=STANDBY python -m src
The server will start and listen for MCP requests at http://localhost:3000/mcp
Push your Actor to the Apify platform and configure standby mode .
Then connect to the Actor endpoint with your MCP client: https://me--my-mcp-server.apify.actor/mcp using the Streamable HTTP transport .
Important: When connecting to your deployed MCP server, pass your Apify API token in the Authorization header as a Bearer token:
Authorization: Bearer <YOUR_APIFY_API_TOKEN>
This template uses the Pay Per Event (PPE) monetization model, which provides flexible pricing based on defined events.
To charge users, define events in JSON format and save them on the Apify platform. Here is an example schema with the tool-call event:
{"tool-call": {"eventTitle": "Price for completing a tool call","eventDescription": "Flat fee for completing a tool call.","eventPriceUsd": 0.05}}
In the Actor, trigger the event with:
await Actor.charge(event_name='tool-call')
This approach allows you to programmatically charge users directly from your Actor, covering the costs of execution and related services.
To set up the PPE model for this Actor:
- Configure Pay Per Event: establish the Pay Per Event pricing schema in the Actor's Monetization settings. First, set the Pricing model to `Pay per event` and add the schema. An example schema can be found in `pay_per_event.json`.
Crawlee + BeautifulSoup
Crawl and scrape websites using Crawlee and BeautifulSoup. Start from a given start URLs, and store results to Apify dataset.
Empty Python project
Empty template with basic structure for the Actor with Apify SDK that allows you to easily add your own functionality.
One‑Page HTML Scraper with BeautifulSoup
Scrape single page with provided URL with HTTPX and extract data from page's HTML with Beautiful Soup.
BeautifulSoup
Example of a web scraper that uses Python HTTPX to scrape HTML from URLs provided on input, parses it using BeautifulSoup and saves results to storage.
Playwright + Chrome
Crawler example that uses headless Chrome driven by Playwright to scrape a website. Headless browsers render JavaScript and can help when getting blocked.
Selenium + Chrome
Scraper example built with Selenium and headless Chrome browser to scrape a website and save the results to storage. A popular alternative to Playwright.