refactor: Modularize MCP server with command batching
- Split monolithic mcp/server.py (1874 lines) into haproxy_mcp/ package:
- config.py: Configuration constants and environment variables
- exceptions.py: Custom exception classes
- validation.py: Input validation functions
- haproxy_client.py: HAProxy Runtime API client with batch support
- file_ops.py: Atomic file operations with locking
- utils.py: CSV parsing utilities
- tools/: MCP tools organized by function
- domains.py: Domain management (3 tools)
- servers.py: Server management (7 tools)
- health.py: Health checks (3 tools)
- monitoring.py: Monitoring (4 tools)
- configuration.py: Config management (4 tools)
- Add haproxy_cmd_batch() for sending multiple commands in single TCP connection
- Optimize server operations: 1 connection instead of 2 per server
- Optimize startup restore: All servers in 1 connection (was 2×N)
- Update type hints to Python 3.9+ style (built-in generics)
- Remove unused imports and functions
- Update CLAUDE.md with new structure and performance notes
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
262
haproxy_mcp/file_ops.py
Normal file
262
haproxy_mcp/file_ops.py
Normal file
@@ -0,0 +1,262 @@
|
||||
"""File I/O operations for HAProxy MCP Server."""
|
||||
|
||||
import fcntl
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from typing import Any, Optional
|
||||
|
||||
from .config import (
|
||||
MAP_FILE,
|
||||
SERVERS_FILE,
|
||||
logger,
|
||||
)
|
||||
from .validation import domain_to_backend
|
||||
|
||||
|
||||
def atomic_write_file(file_path: str, content: str) -> None:
    """Write content to file atomically using temp file + rename.

    The content is written to a temporary file in the same directory,
    flushed and fsync'd to disk, then renamed over the target so readers
    never observe a partially written file.

    Args:
        file_path: Target file path
        content: Content to write

    Raises:
        IOError: If write fails
    """
    dir_path = os.path.dirname(file_path)
    fd = None
    temp_path = None
    try:
        # Temp file must live in the same directory as the target so the
        # rename below stays on one filesystem (rename is only atomic then).
        fd, temp_path = tempfile.mkstemp(dir=dir_path, prefix='.tmp.')
        with os.fdopen(fd, 'w', encoding='utf-8') as f:
            fd = None  # fd is now owned by the file object
            f.write(content)
            f.flush()
            # Without fsync, a crash shortly after the rename could leave
            # an empty/truncated target on some filesystems.
            os.fsync(f.fileno())
        # os.replace is atomic on POSIX and, unlike os.rename, also
        # overwrites an existing target on all platforms.
        os.replace(temp_path, file_path)
        temp_path = None  # Rename succeeded; nothing left to clean up
    except OSError as e:
        raise IOError(f"Failed to write {file_path}: {e}") from e
    finally:
        # Best-effort cleanup of temp artifacts on any failure path.
        if fd is not None:
            try:
                os.close(fd)
            except OSError:
                pass
        if temp_path is not None:
            try:
                os.unlink(temp_path)
            except OSError:
                pass
|
||||
|
||||
|
||||
def get_map_contents() -> list[tuple[str, str]]:
    """Read domains.map and return its (domain, backend) pairs.

    Blank lines and '#' comment lines are skipped; only the first two
    whitespace-separated fields of each remaining line are used.

    Returns:
        List of (domain, backend) tuples from the map file
    """
    mappings: list[tuple[str, str]] = []
    try:
        map_file = open(MAP_FILE, "r", encoding="utf-8")
    except FileNotFoundError:
        return mappings

    with map_file:
        # Shared lock is best-effort: some filesystems reject flock.
        try:
            fcntl.flock(map_file.fileno(), fcntl.LOCK_SH)
        except OSError:
            pass
        try:
            for raw in map_file:
                stripped = raw.strip()
                if not stripped or stripped.startswith("#"):
                    continue
                fields = stripped.split()
                if len(fields) >= 2:
                    mappings.append((fields[0], fields[1]))
        finally:
            try:
                fcntl.flock(map_file.fileno(), fcntl.LOCK_UN)
            except OSError:
                pass
    return mappings
|
||||
|
||||
|
||||
def save_map_file(entries: list[tuple[str, str]]) -> None:
    """Save entries to domains.map file atomically.

    Writes a short explanatory header followed by one "domain backend"
    line per entry, delegating to atomic_write_file so readers never see
    a torn write.

    Args:
        entries: List of (domain, backend) tuples to write

    Raises:
        IOError: If the file cannot be written
    """
    header = (
        "# Domain to Backend mapping\n"
        "# Format: domain backend_name\n"
        "# Wildcard: .domain.com matches *.domain.com\n\n"
    )
    body = "".join(f"{domain} {backend}\n" for domain, backend in entries)
    atomic_write_file(MAP_FILE, header + body)
|
||||
|
||||
|
||||
def get_domain_backend(domain: str) -> Optional[str]:
    """Look up the backend for a domain from domains.map.

    Args:
        domain: The domain to look up

    Returns:
        Backend name if found, None otherwise
    """
    matches = (be for dom, be in get_map_contents() if dom == domain)
    return next(matches, None)
|
||||
|
||||
|
||||
def is_legacy_backend(backend: str) -> bool:
    """Check if backend is a legacy static backend (not a pool).

    Pool backends are identified solely by their "pool_" name prefix;
    anything else is treated as legacy.

    Args:
        backend: Backend name to check

    Returns:
        True if this is a legacy backend, False if it's a pool
    """
    is_pool = backend.startswith("pool_")
    return not is_pool
|
||||
|
||||
|
||||
def get_legacy_backend_name(domain: str) -> str:
    """Convert domain to legacy backend name format.

    Args:
        domain: Domain name

    Returns:
        Legacy backend name (e.g., 'api_example_com_backend')
    """
    base = domain_to_backend(domain)
    return base + "_backend"
|
||||
|
||||
|
||||
def get_backend_and_prefix(domain: str) -> tuple[str, str]:
    """Look up backend and determine server name prefix for a domain.

    Falls back to the legacy backend name when the domain is not in the
    map file. Pool backends use the backend name itself as the server
    prefix; legacy backends use the sanitized domain instead.

    Args:
        domain: The domain name to look up

    Returns:
        Tuple of (backend_name, server_prefix)

    Raises:
        ValueError: If domain cannot be mapped to a valid backend
    """
    backend = get_domain_backend(domain) or get_legacy_backend_name(domain)
    if backend.startswith("pool_"):
        return backend, backend
    return backend, domain_to_backend(domain)
|
||||
|
||||
|
||||
def load_servers_config() -> dict[str, Any]:
    """Load servers configuration from JSON file with file locking.

    A missing or unparseable file yields an empty configuration rather
    than raising, so callers always receive a dict.

    Returns:
        Dictionary with server configurations
    """
    try:
        handle = open(SERVERS_FILE, "r", encoding="utf-8")
    except FileNotFoundError:
        return {}

    with handle:
        # Shared lock is best-effort: some filesystems reject flock.
        try:
            fcntl.flock(handle.fileno(), fcntl.LOCK_SH)
        except OSError:
            logger.debug("File locking not supported for %s", SERVERS_FILE)
        try:
            try:
                return json.load(handle)
            except json.JSONDecodeError as err:
                logger.warning("Corrupt config file %s: %s", SERVERS_FILE, err)
                return {}
        finally:
            try:
                fcntl.flock(handle.fileno(), fcntl.LOCK_UN)
            except OSError:
                pass
|
||||
|
||||
|
||||
def save_servers_config(config: dict[str, Any]) -> None:
    """Save servers configuration to JSON file atomically.

    Serializes to pretty-printed JSON and delegates to atomic_write_file
    (temp file + rename) so concurrent readers never see a torn write.

    Args:
        config: Dictionary with server configurations
    """
    serialized = json.dumps(config, indent=2)
    atomic_write_file(SERVERS_FILE, serialized)
|
||||
|
||||
|
||||
def add_server_to_config(domain: str, slot: int, ip: str, http_port: int) -> None:
    """Add server configuration to persistent storage with file locking.

    An exclusive flock on a sidecar ".lock" file serializes the whole
    read-modify-write cycle against concurrent writers.

    Args:
        domain: Domain name
        slot: Server slot (1 to MAX_SLOTS)
        ip: Server IP address
        http_port: HTTP port
    """
    with open(f"{SERVERS_FILE}.lock", 'w') as guard:
        fcntl.flock(guard.fileno(), fcntl.LOCK_EX)
        try:
            config = load_servers_config()
            domain_servers = config.setdefault(domain, {})
            domain_servers[str(slot)] = {"ip": ip, "http_port": http_port}
            save_servers_config(config)
        finally:
            fcntl.flock(guard.fileno(), fcntl.LOCK_UN)
|
||||
|
||||
|
||||
def remove_server_from_config(domain: str, slot: int) -> None:
    """Remove server configuration from persistent storage with file locking.

    Drops the domain entry entirely once its last slot is removed.
    Removing a slot that does not exist is a no-op (no save is performed).

    Args:
        domain: Domain name
        slot: Server slot to remove
    """
    with open(f"{SERVERS_FILE}.lock", 'w') as guard:
        fcntl.flock(guard.fileno(), fcntl.LOCK_EX)
        try:
            config = load_servers_config()
            slots = config.get(domain)
            if slots is None or str(slot) not in slots:
                return  # nothing to remove; leave the file untouched
            del slots[str(slot)]
            if not slots:
                del config[domain]
            save_servers_config(config)
        finally:
            fcntl.flock(guard.fileno(), fcntl.LOCK_UN)
|
||||
|
||||
|
||||
def remove_domain_from_config(domain: str) -> None:
    """Remove domain from persistent config with file locking.

    A no-op (no save performed) when the domain is not present.

    Args:
        domain: Domain name to remove
    """
    _missing = object()  # sentinel so any stored value counts as present
    with open(f"{SERVERS_FILE}.lock", 'w') as guard:
        fcntl.flock(guard.fileno(), fcntl.LOCK_EX)
        try:
            config = load_servers_config()
            if config.pop(domain, _missing) is not _missing:
                save_servers_config(config)
        finally:
            fcntl.flock(guard.fileno(), fcntl.LOCK_UN)
|
||||
Reference in New Issue
Block a user