diff --git a/README.md b/README.md
index dc23d0d1d..2dedcff0a 100644
--- a/README.md
+++ b/README.md
@@ -1363,7 +1363,9 @@ starlette_app = Starlette(routes=[...])
 # Then wrap it with CORS middleware
 starlette_app = CORSMiddleware(
     starlette_app,
-    allow_origins=["*"],  # Configure appropriately for production
+    # Allow browser-based clients (ChatGPT + local dev). For production, prefer
+    # a strict allowlist of known UI origins.
+    allow_origin_regex=r"^https://(chatgpt\.com|chat\.openai\.com)$|^https?://(localhost|127\.0\.0\.1)(:\d+)?$",
     allow_methods=["GET", "POST", "DELETE"],  # MCP streamable HTTP methods
     expose_headers=["Mcp-Session-Id"],
 )
diff --git a/README.v2.md b/README.v2.md
index 67f181811..03f7429fa 100644
--- a/README.v2.md
+++ b/README.v2.md
@@ -1364,7 +1364,9 @@ starlette_app = Starlette(routes=[...])
 # Then wrap it with CORS middleware
 starlette_app = CORSMiddleware(
     starlette_app,
-    allow_origins=["*"],  # Configure appropriately for production
+    # Allow browser-based clients (ChatGPT + local dev). For production, prefer
+    # a strict allowlist of known UI origins.
+    allow_origin_regex=r"^https://(chatgpt\.com|chat\.openai\.com)$|^https?://(localhost|127\.0\.0\.1)(:\d+)?$",
     allow_methods=["GET", "POST", "DELETE"],  # MCP streamable HTTP methods
     expose_headers=["Mcp-Session-Id"],
 )
diff --git a/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py b/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py
index 9fed2f0aa..126c31329 100644
--- a/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py
+++ b/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py
@@ -127,7 +127,9 @@ async def lifespan(app: Starlette) -> AsyncIterator[None]:
     # for browser-based clients (ensures 500 errors get proper CORS headers)
     starlette_app = CORSMiddleware(
         starlette_app,
-        allow_origins=["*"],  # Allow all origins - adjust as needed for production
+        # Allow browser-based clients (ChatGPT + local dev). For production, prefer
+        # a strict allowlist of known UI origins.
+        allow_origin_regex=r"^https://(chatgpt\.com|chat\.openai\.com)$|^https?://(localhost|127\.0\.0\.1)(:\d+)?$",
         allow_methods=["GET", "POST", "DELETE"],  # MCP streamable HTTP methods
         expose_headers=["Mcp-Session-Id"],
     )
diff --git a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py
index ef03d9b08..ab3e26776 100644
--- a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py
+++ b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py
@@ -152,7 +152,9 @@ async def lifespan(app: Starlette) -> AsyncIterator[None]:
     # for browser-based clients (ensures 500 errors get proper CORS headers)
     starlette_app = CORSMiddleware(
         starlette_app,
-        allow_origins=["*"],  # Allow all origins - adjust as needed for production
+        # Allow browser-based clients (ChatGPT + local dev). For production, prefer
+        # a strict allowlist of known UI origins.
+        allow_origin_regex=r"^https://(chatgpt\.com|chat\.openai\.com)$|^https?://(localhost|127\.0\.0\.1)(:\d+)?$",
         allow_methods=["GET", "POST", "DELETE"],  # MCP streamable HTTP methods
         expose_headers=["Mcp-Session-Id"],
     )