feat: Add SSH remote execution for HAProxy on remote host
MCP server can now manage HAProxy running on a remote host via SSH. When SSH_HOST env var is set, all file I/O and subprocess commands (podman, acme.sh, openssl) are routed through SSH instead of local exec. - Add ssh_ops.py module with remote_exec, run_command, file I/O helpers - Modify file_ops.py to support remote reads/writes via SSH - Update all tools (domains, certificates, health, configuration) for SSH - Fix domains.py: replace direct fcntl usage with file_lock context manager - Add openssh-client to Docker image for SSH connectivity - Update k8s deployment with SSH env vars and SSH key secret mount Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -12,6 +12,7 @@ from .config import (
|
||||
WILDCARDS_MAP_FILE,
|
||||
SERVERS_FILE,
|
||||
CERTS_FILE,
|
||||
REMOTE_MODE,
|
||||
logger,
|
||||
)
|
||||
from .validation import domain_to_backend
|
||||
@@ -21,22 +22,19 @@ from .validation import domain_to_backend
|
||||
def file_lock(lock_path: str) -> Generator[None, None, None]:
|
||||
"""Acquire exclusive file lock for atomic operations.
|
||||
|
||||
This context manager provides a consistent locking mechanism for
|
||||
read-modify-write operations on configuration files to prevent
|
||||
race conditions during concurrent access.
|
||||
In REMOTE_MODE, locking is skipped (single-writer assumption
|
||||
with atomic writes on the remote host).
|
||||
|
||||
Args:
|
||||
lock_path: Path to the lock file (typically config_file.lock)
|
||||
|
||||
Yields:
|
||||
None - the lock is held for the duration of the context
|
||||
|
||||
Example:
|
||||
with file_lock("/path/to/config.json.lock"):
|
||||
config = load_config()
|
||||
config["key"] = "value"
|
||||
save_config(config)
|
||||
"""
|
||||
if REMOTE_MODE:
|
||||
yield
|
||||
return
|
||||
|
||||
with open(lock_path, 'w') as lock_file:
|
||||
fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)
|
||||
try:
|
||||
@@ -55,6 +53,11 @@ def atomic_write_file(file_path: str, content: str) -> None:
|
||||
Raises:
|
||||
IOError: If write fails
|
||||
"""
|
||||
if REMOTE_MODE:
|
||||
from .ssh_ops import remote_write_file
|
||||
remote_write_file(file_path, content)
|
||||
return
|
||||
|
||||
dir_path = os.path.dirname(file_path)
|
||||
fd = None
|
||||
temp_path = None
|
||||
@@ -91,29 +94,49 @@ def _read_map_file(file_path: str) -> list[tuple[str, str]]:
|
||||
"""
|
||||
entries = []
|
||||
try:
|
||||
with open(file_path, "r", encoding="utf-8") as f:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_SH)
|
||||
except OSError as e:
|
||||
logger.debug("File locking not supported for %s: %s", file_path, e)
|
||||
try:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
parts = line.split()
|
||||
if len(parts) >= 2:
|
||||
entries.append((parts[0], parts[1]))
|
||||
finally:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
|
||||
except OSError as e:
|
||||
logger.debug("File unlock failed for %s: %s", file_path, e)
|
||||
content = _read_file(file_path)
|
||||
for line in content.splitlines():
|
||||
line = line.strip()
|
||||
if not line or line.startswith("#"):
|
||||
continue
|
||||
parts = line.split()
|
||||
if len(parts) >= 2:
|
||||
entries.append((parts[0], parts[1]))
|
||||
except FileNotFoundError:
|
||||
logger.debug("Map file not found: %s", file_path)
|
||||
return entries
|
||||
|
||||
|
||||
def _read_file(file_path: str) -> str:
    """Return the text contents of a file, local or remote.

    When REMOTE_MODE is enabled the read is delegated to the SSH layer
    (no local locking applies); otherwise the file is read locally under
    a best-effort shared flock so concurrent writers are not observed
    mid-write.

    Args:
        file_path: Path to the file

    Returns:
        File contents as string

    Raises:
        FileNotFoundError: If file doesn't exist
    """
    if REMOTE_MODE:
        # Imported lazily so local-only deployments never touch ssh_ops.
        from .ssh_ops import remote_read_file
        return remote_read_file(file_path)

    with open(file_path, "r", encoding="utf-8") as handle:
        # Shared lock is advisory and best-effort: some filesystems
        # (e.g. certain network mounts) reject flock, which is fine.
        try:
            fcntl.flock(handle.fileno(), fcntl.LOCK_SH)
        except OSError as exc:
            logger.debug("File locking not supported for %s: %s", file_path, exc)
        try:
            data = handle.read()
        finally:
            # Unlock failures are harmless here — the descriptor is
            # about to be closed anyway, which drops the lock.
            try:
                fcntl.flock(handle.fileno(), fcntl.LOCK_UN)
            except OSError:
                pass
    return data
|
||||
|
||||
|
||||
def get_map_contents() -> list[tuple[str, str]]:
|
||||
"""Read both domains.map and wildcards.map and return combined entries.
|
||||
|
||||
@@ -250,24 +273,14 @@ def get_backend_and_prefix(domain: str) -> tuple[str, str]:
|
||||
|
||||
|
||||
def load_servers_config() -> dict[str, Any]:
|
||||
"""Load servers configuration from JSON file with file locking.
|
||||
"""Load servers configuration from JSON file.
|
||||
|
||||
Returns:
|
||||
Dictionary with server configurations
|
||||
"""
|
||||
try:
|
||||
with open(SERVERS_FILE, "r", encoding="utf-8") as f:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_SH)
|
||||
except OSError:
|
||||
logger.debug("File locking not supported for %s", SERVERS_FILE)
|
||||
try:
|
||||
return json.load(f)
|
||||
finally:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
|
||||
except OSError:
|
||||
pass
|
||||
content = _read_file(SERVERS_FILE)
|
||||
return json.loads(content)
|
||||
except FileNotFoundError:
|
||||
return {}
|
||||
except json.JSONDecodeError as e:
|
||||
@@ -398,19 +411,9 @@ def load_certs_config() -> list[str]:
|
||||
List of domain names
|
||||
"""
|
||||
try:
|
||||
with open(CERTS_FILE, "r", encoding="utf-8") as f:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_SH)
|
||||
except OSError as e:
|
||||
logger.debug("File locking not supported for %s: %s", CERTS_FILE, e)
|
||||
try:
|
||||
data = json.load(f)
|
||||
return data.get("domains", [])
|
||||
finally:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
|
||||
except OSError as e:
|
||||
logger.debug("File unlock failed for %s: %s", CERTS_FILE, e)
|
||||
content = _read_file(CERTS_FILE)
|
||||
data = json.loads(content)
|
||||
return data.get("domains", [])
|
||||
except FileNotFoundError:
|
||||
return []
|
||||
except json.JSONDecodeError as e:
|
||||
|
||||
Reference in New Issue
Block a user