#!/usr/bin/env python3
"""
Portainer Stacks MCP Server

Provides stack deployment and management functionality through Portainer's API.
Supports Docker Compose stacks and Kubernetes manifests.

This module implements comprehensive stack management capabilities, including:
- Stack creation from strings, files, Git repositories, or templates
- Stack lifecycle management (start, stop, update, delete)
- Support for both Docker Swarm and Kubernetes environments
- Environment variable and secret management
- Stack migration between environments
- Template-based deployments with variable substitution

The server handles various deployment scenarios:
- Docker Compose stacks for Swarm mode
- Kubernetes manifests for K8s clusters
- Git-based deployments with authentication
- App templates from Portainer's registry

Complexity: O(n) for list operations, where n is the number of stacks
Dependencies: aiohttp for async HTTP, mcp for the server protocol
Call Flow: MCP client -> handle_call_tool() -> make_request() -> Portainer API

Environment Variables:
    PORTAINER_URL: Base URL of the Portainer instance (required)
    PORTAINER_API_KEY: API key for authentication (required)
    MCP_MODE: Set to "true" to suppress logging (default: true)

Stack Types:
    - Swarm: Docker Compose format for Swarm orchestration
    - Kubernetes: YAML manifests for K8s deployments
    - Standalone: Single-node Docker Compose (converts to Swarm)
"""
import os
import sys
import json
import asyncio
import aiohttp
import logging
from typing import Any, Optional

from mcp.server import Server, NotificationOptions
from mcp.server.models import InitializationOptions
import mcp.server.stdio
import mcp.types as types

# Set up logging
MCP_MODE = os.getenv("MCP_MODE", "true").lower() == "true"
if MCP_MODE:
    # In MCP mode, suppress all logs to stdout/stderr
    logging.basicConfig(level=logging.CRITICAL + 1)
else:
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Environment variables
PORTAINER_URL = os.getenv("PORTAINER_URL", "").rstrip("/")
PORTAINER_API_KEY = os.getenv("PORTAINER_API_KEY", "")

# Validate environment
if not PORTAINER_URL or not PORTAINER_API_KEY:
    if not MCP_MODE:
        logger.error("PORTAINER_URL and PORTAINER_API_KEY must be set")
    sys.exit(1)


# Helper functions
async def make_request(
    method: str,
    endpoint: str,
    json_data: Optional[dict] = None,
    params: Optional[dict] = None,
    data: Optional[Any] = None,
    headers: Optional[dict] = None
) -> dict:
    """
    Make an authenticated request to the Portainer API.

    Centralized HTTP request handler for all Portainer stack API interactions.
    Handles authentication, error responses, and timeout management.
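
    Example (illustrative only; the stack ID and error message are made up):

        result = await make_request("GET", "/api/stacks/42")
        if "error" in result:
            # e.g. {"error": "API request failed: 404 - stack not found"}
            print(result["error"])
        else:
            print(result.get("Name"))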

    Args:
        method: HTTP method (GET, POST, PUT, DELETE)
        endpoint: API endpoint path (e.g., /api/stacks)
        json_data: Optional JSON payload for the request body
        params: Optional query parameters
        data: Optional form data or raw body content
        headers: Optional additional headers to merge with the defaults

    Returns:
        Dict containing response data or error information.
        Error responses have an 'error' key with a descriptive message.

    Complexity: O(1) - Single HTTP request

    Call Flow:
        - Called by: All tool handler functions
        - Calls: aiohttp for async HTTP operations

    Error Handling:
        - HTTP 4xx/5xx errors return a structured error dict
        - Timeout errors (30s) return a timeout error
        - Network errors return a connection error
        - Parses Portainer error details from the response body
    """
    url = f"{PORTAINER_URL}{endpoint}"
    default_headers = {
        "X-API-Key": PORTAINER_API_KEY
    }
    if headers:
        default_headers.update(headers)

    timeout = aiohttp.ClientTimeout(total=30)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.request(
                method,
                url,
                json=json_data,
                params=params,
                data=data,
                headers=default_headers
            ) as response:
                response_text = await response.text()

                if response.status >= 400:
                    error_msg = f"API request failed: {response.status}"
                    try:
                        error_data = json.loads(response_text)
                        if "message" in error_data:
                            error_msg = f"{error_msg} - {error_data['message']}"
                        elif "details" in error_data:
                            error_msg = f"{error_msg} - {error_data['details']}"
                    except json.JSONDecodeError:
                        if response_text:
                            error_msg = f"{error_msg} - {response_text}"
                    return {"error": error_msg}

                if response_text:
                    return json.loads(response_text)
                return {}
    except asyncio.TimeoutError:
        return {"error": "Request timeout"}
    except Exception as e:
        return {"error": f"Request failed: {str(e)}"}


def format_stack_status(stack: dict) -> str:
    """
    Format stack status with an emoji.

    Converts numeric stack status codes to user-friendly strings with visual
    indicators for quick status recognition.

    Args:
        stack: Stack object from the Portainer API

    Returns:
        Formatted status string with an emoji

    Complexity: O(1) - Simple lookup

    Call Flow:
        - Called by: Stack listing and detail display functions
        - Calls: None (pure function)

    Status Codes:
        - 1: Active (running) - āœ…
        - 2: Inactive (stopped) - āš ļø
        - Other: Unknown - ā“
    """
    status = stack.get("Status", 0)
    if status == 1:
        return "āœ… Active"
    elif status == 2:
        return "āš ļø Inactive"
    else:
        return "ā“ Unknown"


def format_stack_type(stack_type: int) -> str:
    """
    Format stack type.

    Converts numeric stack type codes to readable strings identifying the
    orchestration platform.

    Args:
        stack_type: Numeric type code from the Portainer API

    Returns:
        Stack type as a string (Swarm, Compose, Kubernetes, or Unknown)

    Complexity: O(1) - Simple lookup

    Call Flow:
        - Called by: Stack listing and detail display functions
        - Calls: None (pure function)

    Type Codes:
        - 1: Swarm (Docker Swarm mode)
        - 2: Compose (standalone Docker Compose)
        - 3: Kubernetes (K8s manifests)
        - Other: Unknown type
    """
    if stack_type == 1:
        return "Swarm"
    elif stack_type == 2:
        return "Compose"
    elif stack_type == 3:
        return "Kubernetes"
    else:
        return "Unknown"


# Create server instance
server = Server("portainer-stacks")


@server.list_tools()
async def handle_list_tools() -> list[types.Tool]:
    """
    List all available tools.

    Returns the complete list of stack management tools provided by this
    MCP server. Each tool includes its schema for input validation.
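
    For illustration, each entry pairs a tool name with a JSON Schema describing
    its arguments; condensed from the full definition below, the "get_stack" tool
    looks like:

        types.Tool(
            name="get_stack",
            description="Get detailed information about a specific stack",
            inputSchema={
                "type": "object",
                "properties": {"stack_id": {"type": "string", "description": "Stack ID"}},
                "required": ["stack_id"]
            }
        )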

    Returns:
        list[types.Tool]: List of available tools with their schemas

    Complexity: O(1) - Returns a static list

    Call Flow:
        - Called by: MCP protocol during initialization
        - Calls: None (static return)

    Available Tools:
        - list_stacks: View all stacks across environments
        - get_stack: Get detailed info about a specific stack
        - get_stack_file: Retrieve the stack definition file
        - create_compose_stack_from_file: Deploy from Compose file content
        - create_compose_stack_from_git: Deploy from a Git repository
        - create_kubernetes_stack: Deploy from a Kubernetes manifest
        - update_stack: Modify an existing stack's configuration
        - update_git_stack: Redeploy a Git-based stack from its repository
        - start_stack: Start a stopped stack
        - stop_stack: Stop a running stack
        - delete_stack: Remove a stack and its resources
        - migrate_stack: Recreate a stack in another environment
        - get_stack_logs: Get logs from a stack's containers
    """
    return [
        types.Tool(
            name="list_stacks",
            description="List all stacks across environments",
            inputSchema={
                "type": "object",
                "properties": {
                    "environment_id": {
                        "type": "string",
                        "description": "Filter by environment ID (optional)"
                    }
                }
            }
        ),
        types.Tool(
            name="get_stack",
            description="Get detailed information about a specific stack",
            inputSchema={
                "type": "object",
                "properties": {
                    "stack_id": {
                        "type": "string",
                        "description": "Stack ID"
                    }
                },
                "required": ["stack_id"]
            }
        ),
        types.Tool(
            name="get_stack_file",
            description="Get the stack file content (Docker Compose or Kubernetes manifest)",
            inputSchema={
                "type": "object",
                "properties": {
                    "stack_id": {
                        "type": "string",
                        "description": "Stack ID"
                    }
                },
                "required": ["stack_id"]
            }
        ),
        types.Tool(
            name="create_compose_stack_from_file",
            description="Create a new Docker Compose stack from file content",
            inputSchema={
                "type": "object",
                "properties": {
                    "environment_id": {
                        "type": "string",
                        "description": "Target environment ID"
                    },
                    "name": {
                        "type": "string",
                        "description": "Stack name"
                    },
                    "compose_file": {
                        "type": "string",
                        "description": "Docker Compose file content (YAML)"
                    },
                    "env_vars": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "name": {"type": "string"},
                                "value": {"type": "string"}
                            }
                        },
                        "description": "Environment variables"
                    }
                },
                "required": ["environment_id", "name", "compose_file"]
            }
        ),
        types.Tool(
            name="create_compose_stack_from_git",
            description="Create a new Docker Compose stack from a Git repository",
            inputSchema={
                "type": "object",
                "properties": {
                    "environment_id": {
                        "type": "string",
                        "description": "Target environment ID"
                    },
                    "name": {
                        "type": "string",
                        "description": "Stack name"
                    },
                    "repository_url": {
                        "type": "string",
                        "description": "Git repository URL"
                    },
                    "repository_ref": {
                        "type": "string",
                        "description": "Git reference (branch/tag)",
                        "default": "main"
                    },
                    "compose_path": {
                        "type": "string",
                        "description": "Path to the compose file in the repository",
                        "default": "docker-compose.yml"
                    },
                    "repository_auth": {
                        "type": "boolean",
                        "description": "Use repository authentication",
                        "default": False
                    },
                    "repository_username": {
                        "type": "string",
                        "description": "Git username (if auth required)"
                    },
                    "repository_password": {
                        "type": "string",
                        "description": "Git password/token (if auth required)"
                    },
                    "env_vars": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "name": {"type": "string"},
                                "value": {"type": "string"}
                            }
                        },
                        "description": "Environment variables"
                    },
                    "enable_gitops": {
                        "type": "boolean",
                        "description": "Enable GitOps automatic updates",
                        "default": False
                    },
                    "gitops_interval": {
                        "type": "string",
                        "description": "GitOps polling interval (e.g., '5m', '1h')",
                        "default": "5m"
                    },
                    "gitops_mechanism": {
                        "type": "string",
                        "enum": ["polling", "webhook"],
                        "description": "GitOps update mechanism",
                        "default": "polling"
                    },
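                    # The remaining gitops_* fields are only consulted by the handler when
                    # enable_gitops is true; gitops_webhook_id is read only when
                    # gitops_mechanism is "webhook".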
"gitops_webhook_id": { "type": "string", "description": "Webhook ID (if using webhook mechanism)" }, "gitops_pull_image": { "type": "boolean", "description": "Pull latest images on GitOps update", "default": True }, "gitops_force_update": { "type": "boolean", "description": "Force redeployment even if no changes", "default": False } }, "required": ["environment_id", "name", "repository_url"] } ), types.Tool( name="create_kubernetes_stack", description="Create a new Kubernetes stack from manifest", inputSchema={ "type": "object", "properties": { "environment_id": { "type": "string", "description": "Target environment ID" }, "name": { "type": "string", "description": "Stack name" }, "namespace": { "type": "string", "description": "Kubernetes namespace", "default": "default" }, "manifest": { "type": "string", "description": "Kubernetes manifest content (YAML)" } }, "required": ["environment_id", "name", "manifest"] } ), types.Tool( name="update_stack", description="Update an existing stack", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" }, "compose_file": { "type": "string", "description": "Updated compose file or manifest (required for file-based stacks)" }, "env_vars": { "type": "array", "items": { "type": "object", "properties": { "name": {"type": "string"}, "value": {"type": "string"} } }, "description": "Updated environment variables" }, "pull_image": { "type": "boolean", "description": "Pull latest images before updating", "default": True } }, "required": ["stack_id"] } ), types.Tool( name="update_git_stack", description="Update a Git-based stack (pull latest changes)", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" }, "pull_image": { "type": "boolean", "description": "Pull latest images after updating", "default": True } }, "required": ["stack_id"] } ), types.Tool( name="start_stack", description="Start a stopped stack", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" } }, "required": ["stack_id"] } ), types.Tool( name="stop_stack", description="Stop a running stack", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" } }, "required": ["stack_id"] } ), types.Tool( name="delete_stack", description="Delete a stack and optionally its volumes", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" }, "delete_volumes": { "type": "boolean", "description": "Also delete associated volumes", "default": False } }, "required": ["stack_id"] } ), types.Tool( name="migrate_stack", description="Migrate a stack to another environment", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" }, "target_environment_id": { "type": "string", "description": "Target environment ID" }, "new_name": { "type": "string", "description": "New stack name (optional)" } }, "required": ["stack_id", "target_environment_id"] } ), types.Tool( name="get_stack_logs", description="Get logs from all containers in a stack", inputSchema={ "type": "object", "properties": { "stack_id": { "type": "string", "description": "Stack ID" }, "tail": { "type": "integer", "description": "Number of lines to show from the end", "default": 100 }, "timestamps": { "type": "boolean", "description": "Show timestamps", "default": True } }, "required": ["stack_id"] } ) ] @server.call_tool() async def handle_call_tool( 


@server.call_tool()
async def handle_call_tool(
    name: str,
    arguments: dict | None
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    """Handle tool execution."""
    if not arguments:
        arguments = {}

    try:
        # List stacks
        if name == "list_stacks":
            endpoint = "/api/stacks"
            params = {}

            result = await make_request("GET", endpoint, params=params)
            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            # Filter by environment if specified
            stacks = result
            if arguments.get("environment_id"):
                env_id = int(arguments["environment_id"])
                stacks = [s for s in stacks if s.get("EndpointId") == env_id]

            if not stacks:
                return [types.TextContent(type="text", text="No stacks found")]

            output = "šŸ“š Stacks:\n\n"

            # Group by environment
            env_groups = {}
            for stack in stacks:
                env_id = stack.get("EndpointId", "Unknown")
                if env_id not in env_groups:
                    env_groups[env_id] = []
                env_groups[env_id].append(stack)

            for env_id, env_stacks in env_groups.items():
                output += f"Environment {env_id}:\n"
                for stack in env_stacks:
                    status = format_stack_status(stack)
                    stack_type = format_stack_type(stack.get("Type", 0))
                    output += f"  • {stack['Name']} (ID: {stack['Id']})\n"
                    output += f"    Type: {stack_type} | Status: {status}\n"
                    if stack.get("GitConfig"):
                        output += f"    Git: {stack['GitConfig']['URL']} ({stack['GitConfig']['ReferenceName']})\n"
                output += "\n"

            return [types.TextContent(type="text", text=output)]

        # Get stack details
        elif name == "get_stack":
            stack_id = arguments["stack_id"]
            result = await make_request("GET", f"/api/stacks/{stack_id}")

            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            output = f"šŸ“š Stack: {result['Name']}\n\n"
            output += f"ID: {result['Id']}\n"
            output += f"Type: {format_stack_type(result.get('Type', 0))}\n"
            output += f"Status: {format_stack_status(result)}\n"
            output += f"Environment ID: {result.get('EndpointId', 'Unknown')}\n"
            output += f"Created by: {result.get('CreatedBy', 'Unknown')}\n"

            if result.get("GitConfig"):
                git = result["GitConfig"]
                output += f"\nšŸ”— Git Configuration:\n"
                output += f"  Repository: {git['URL']}\n"
                output += f"  Reference: {git['ReferenceName']}\n"
                output += f"  Path: {git.get('ComposeFilePathInRepository', 'N/A')}\n"

            if result.get("Env"):
                output += f"\nšŸ”§ Environment Variables:\n"
                for env in result["Env"]:
                    output += f"  {env['name']} = {env['value']}\n"

            if result.get("ResourceControl"):
                rc = result["ResourceControl"]
                output += f"\nšŸ”’ Access Control:\n"
                output += f"  Public: {'Yes' if rc.get('Public') else 'No'}\n"
                if rc.get("Users"):
                    output += f"  Users: {len(rc['Users'])} users\n"
                if rc.get("Teams"):
                    output += f"  Teams: {len(rc['Teams'])} teams\n"

            return [types.TextContent(type="text", text=output)]

        # Get stack file
        elif name == "get_stack_file":
            stack_id = arguments["stack_id"]
            result = await make_request("GET", f"/api/stacks/{stack_id}/file")

            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            content = result.get("StackFileContent", "")
            if not content:
                return [types.TextContent(type="text", text="Stack file is empty")]

            output = f"šŸ“„ Stack File Content:\n\n```yaml\n{content}\n```"
            return [types.TextContent(type="text", text=output)]

        # Create compose stack from file
        elif name == "create_compose_stack_from_file":
            env_id = arguments["environment_id"]

            # Build request data
            data = {
                "Name": arguments["name"],
                "StackFileContent": arguments["compose_file"],
                "EndpointId": int(env_id)
            }

            # Add environment variables if provided
            if arguments.get("env_vars"):
                data["Env"] = arguments["env_vars"]
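            # Illustrative payload assembled above (the values are made up):
            #   {"Name": "blog", "StackFileContent": "<compose YAML>",
            #    "EndpointId": 2, "Env": [{"name": "TAG", "value": "latest"}]}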
result = await make_request("POST", "/api/stacks", json_data=data) if "error" in result: return [types.TextContent(type="text", text=f"Error: {result['error']}")] output = f"āœ… Stack created successfully!\n\n" output += f"Name: {result['Name']}\n" output += f"ID: {result['Id']}\n" output += f"Type: Compose\n" output += f"Environment: {result.get('EndpointId', 'Unknown')}\n" return [types.TextContent(type="text", text=output)] # Create compose stack from Git elif name == "create_compose_stack_from_git": env_id = arguments["environment_id"] # Build request data with lowercase field names for this endpoint data = { "name": arguments["name"], "repositoryURL": arguments["repository_url"], "repositoryReferenceName": arguments.get("repository_ref", "main"), "composeFilePathInRepository": arguments.get("compose_path", "docker-compose.yml") } # Add authentication if provided if arguments.get("repository_auth") and arguments.get("repository_username"): data["repositoryAuthentication"] = True data["repositoryUsername"] = arguments["repository_username"] data["repositoryPassword"] = arguments.get("repository_password", "") else: data["repositoryAuthentication"] = False # Add environment variables if provided if arguments.get("env_vars"): data["env"] = arguments["env_vars"] # Add GitOps configuration if provided if arguments.get("enable_gitops", False): auto_update = { "interval": arguments.get("gitops_interval", "5m"), "forcePullImage": arguments.get("gitops_pull_image", True), "forceUpdate": arguments.get("gitops_force_update", False) } if arguments.get("gitops_mechanism") == "webhook": # For webhook, we need to generate or provide a webhook ID auto_update["webhook"] = arguments.get("gitops_webhook_id", "") data["autoUpdate"] = auto_update # Use the correct endpoint for standalone Docker stack creation from Git endpoint = f"/api/stacks/create/standalone/repository?endpointId={env_id}" result = await make_request("POST", endpoint, json_data=data) if "error" in result: return [types.TextContent(type="text", text=f"Error: {result['error']}")] output = f"āœ… Git-based stack created successfully!\n\n" output += f"Name: {result['Name']}\n" output += f"ID: {result['Id']}\n" output += f"Repository: {arguments['repository_url']}\n" output += f"Branch/Tag: {arguments.get('repository_ref', 'main')}\n" if arguments.get("enable_gitops", False): output += f"\nšŸ”„ GitOps: Enabled\n" output += f" Mechanism: {arguments.get('gitops_mechanism', 'polling')}\n" output += f" Interval: {arguments.get('gitops_interval', '5m')}\n" output += f" Pull Images: {'Yes' if arguments.get('gitops_pull_image', True) else 'No'}\n" output += f" Force Update: {'Yes' if arguments.get('gitops_force_update', False) else 'No'}\n" return [types.TextContent(type="text", text=output)] # Create Kubernetes stack elif name == "create_kubernetes_stack": env_id = arguments["environment_id"] # Build request data data = { "Name": arguments["name"], "StackFileContent": arguments["manifest"], "EndpointId": int(env_id), "Type": 3, # Kubernetes type "Namespace": arguments.get("namespace", "default") } result = await make_request("POST", "/api/stacks", json_data=data) if "error" in result: return [types.TextContent(type="text", text=f"Error: {result['error']}")] output = f"āœ… Kubernetes stack created successfully!\n\n" output += f"Name: {result['Name']}\n" output += f"ID: {result['Id']}\n" output += f"Namespace: {arguments.get('namespace', 'default')}\n" output += f"Environment: {result.get('EndpointId', 'Unknown')}\n" return 
        # Update stack
        elif name == "update_stack":
            stack_id = arguments["stack_id"]

            # Get current stack info first
            stack_info = await make_request("GET", f"/api/stacks/{stack_id}")
            if "error" in stack_info:
                return [types.TextContent(type="text", text=f"Error: {stack_info['error']}")]

            # Build update data
            data = {}
            if arguments.get("compose_file"):
                data["StackFileContent"] = arguments["compose_file"]
            if arguments.get("env_vars"):
                data["Env"] = arguments["env_vars"]
            data["Prune"] = arguments.get("prune", False)
            data["PullImage"] = arguments.get("pull_image", True)

            endpoint = f"/api/stacks/{stack_id}"
            params = {"endpointId": stack_info["EndpointId"]}

            result = await make_request("PUT", endpoint, json_data=data, params=params)
            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            return [types.TextContent(type="text", text=f"āœ… Stack '{stack_info['Name']}' updated successfully!")]

        # Update Git stack
        elif name == "update_git_stack":
            stack_id = arguments["stack_id"]

            # Get current stack info
            stack_info = await make_request("GET", f"/api/stacks/{stack_id}")
            if "error" in stack_info:
                return [types.TextContent(type="text", text=f"Error: {stack_info['error']}")]

            if not stack_info.get("GitConfig"):
                return [types.TextContent(type="text", text="Error: This is not a Git-based stack")]

            endpoint = f"/api/stacks/{stack_id}/git/redeploy"
            params = {
                "endpointId": stack_info["EndpointId"],
                "pullImage": str(arguments.get("pull_image", True)).lower()
            }

            result = await make_request("PUT", endpoint, params=params)
            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            return [types.TextContent(type="text", text=f"āœ… Git stack '{stack_info['Name']}' updated from repository!")]

        # Start stack
        elif name == "start_stack":
            stack_id = arguments["stack_id"]

            # Get stack info
            stack_info = await make_request("GET", f"/api/stacks/{stack_id}")
            if "error" in stack_info:
                return [types.TextContent(type="text", text=f"Error: {stack_info['error']}")]

            endpoint = f"/api/stacks/{stack_id}/start"
            params = {"endpointId": stack_info["EndpointId"]}

            result = await make_request("POST", endpoint, params=params)
            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            return [types.TextContent(type="text", text=f"āœ… Stack '{stack_info['Name']}' started successfully!")]

        # Stop stack
        elif name == "stop_stack":
            stack_id = arguments["stack_id"]

            # Get stack info
            stack_info = await make_request("GET", f"/api/stacks/{stack_id}")
            if "error" in stack_info:
                return [types.TextContent(type="text", text=f"Error: {stack_info['error']}")]

            endpoint = f"/api/stacks/{stack_id}/stop"
            params = {"endpointId": stack_info["EndpointId"]}

            result = await make_request("POST", endpoint, params=params)
            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            return [types.TextContent(type="text", text=f"ā¹ļø Stack '{stack_info['Name']}' stopped successfully!")]

        # Delete stack
        elif name == "delete_stack":
            stack_id = arguments["stack_id"]

            # Get stack info
            stack_info = await make_request("GET", f"/api/stacks/{stack_id}")
            if "error" in stack_info:
                return [types.TextContent(type="text", text=f"Error: {stack_info['error']}")]

            endpoint = f"/api/stacks/{stack_id}"
            params = {
                "endpointId": stack_info["EndpointId"],
                "external": "false"
            }

            if arguments.get("delete_volumes"):
                # For compose stacks, this deletes volumes
                data = {"removeVolumes": True}
                result = await make_request("DELETE", endpoint, params=params, json_data=data)
            else:
                result = await make_request("DELETE", endpoint, params=params)

            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            output = f"šŸ—‘ļø Stack '{stack_info['Name']}' deleted successfully!"
            if arguments.get("delete_volumes"):
                output += " (including volumes)"

            return [types.TextContent(type="text", text=output)]

        # Migrate stack
        elif name == "migrate_stack":
            stack_id = arguments["stack_id"]
            target_env = arguments["target_environment_id"]

            # Get current stack info and file
            stack_info = await make_request("GET", f"/api/stacks/{stack_id}")
            if "error" in stack_info:
                return [types.TextContent(type="text", text=f"Error: {stack_info['error']}")]

            stack_file = await make_request("GET", f"/api/stacks/{stack_id}/file")
            if "error" in stack_file:
                return [types.TextContent(type="text", text=f"Error: {stack_file['error']}")]

            # Create new stack in target environment
            new_name = arguments.get("new_name", f"{stack_info['Name']}-migrated")

            data = {
                "Name": new_name,
                "StackFileContent": stack_file.get("StackFileContent", ""),
                "EndpointId": int(target_env),
                "Type": stack_info.get("Type", 2)
            }

            # Copy environment variables if any
            if stack_info.get("Env"):
                data["Env"] = stack_info["Env"]

            # For Kubernetes stacks, copy namespace
            if stack_info.get("Type") == 3 and stack_info.get("Namespace"):
                data["Namespace"] = stack_info["Namespace"]

            result = await make_request("POST", "/api/stacks", json_data=data)

            if "error" in result:
                return [types.TextContent(type="text", text=f"Error: {result['error']}")]

            output = f"āœ… Stack migrated successfully!\n\n"
            output += f"Original: {stack_info['Name']} (Environment {stack_info['EndpointId']})\n"
            output += f"New: {new_name} (Environment {target_env})\n"
            output += f"New Stack ID: {result['Id']}\n"
            output += "\nNote: Original stack was not deleted."

            return [types.TextContent(type="text", text=output)]

        # Get stack logs
        elif name == "get_stack_logs":
            stack_id = arguments["stack_id"]

            # This is a simplified version - actual implementation would need to:
            # 1. Get stack details
            # 2. List all containers in the stack
            # 3. Aggregate logs from all containers
            return [types.TextContent(
                type="text",
                text="Note: Stack logs aggregation requires listing all containers in the stack. Use docker logs on individual containers for now."
            )]

        else:
            return [types.TextContent(type="text", text=f"Unknown tool: {name}")]

    except Exception as e:
        logger.error(f"Error in {name}: {str(e)}", exc_info=True)
        return [types.TextContent(type="text", text=f"Error: {str(e)}")]


async def main():
    # Run the server using stdin/stdout streams
    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        await server.run(
            read_stream,
            write_stream,
            InitializationOptions(
                server_name="portainer-stacks",
                server_version="0.1.0",
                capabilities=server.get_capabilities(
                    notification_options=NotificationOptions(),
                    experimental_capabilities={},
                ),
            ),
        )


if __name__ == "__main__":
    asyncio.run(main())
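

# Illustrative invocation (the script name, URL, and API key below are made up).
# The server speaks MCP over stdio, so it is normally launched by an MCP client
# rather than run interactively:
#
#   PORTAINER_URL=https://portainer.example.com \
#   PORTAINER_API_KEY=ptr_xxxxxxxxxxxx \
#   python3 portainer_stacks_mcp.py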