refactor: migrate data storage from JSON/map files to SQLite

Replace servers.json, certificates.json, and map file parsing with SQLite
(WAL mode) as the single source of truth. HAProxy map files are now
generated from SQLite via sync_map_files().

Key changes:
- Add db.py with schema, connection management, and JSON migration
- Add DB_FILE config constant
- Delegate file_ops.py functions to db.py
- Refactor domains.py to use file_ops instead of direct list manipulation
- Fix subprocess.TimeoutExpired not caught (doesn't inherit TimeoutError)
- Add DB health check in health.py
- Init DB on startup in server.py and __main__.py
- Update all 359 tests to use SQLite-backed functions

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,7 +1,11 @@
|
||||
"""File I/O operations for HAProxy MCP Server."""
|
||||
"""File I/O operations for HAProxy MCP Server.
|
||||
|
||||
Most data access is now delegated to db.py (SQLite).
|
||||
This module retains atomic file writes, map file I/O for HAProxy,
|
||||
and provides backward-compatible function signatures.
|
||||
"""
|
||||
|
||||
import fcntl
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
@@ -10,8 +14,6 @@ from typing import Any, Generator, Optional
|
||||
from .config import (
|
||||
MAP_FILE,
|
||||
WILDCARDS_MAP_FILE,
|
||||
SERVERS_FILE,
|
||||
CERTS_FILE,
|
||||
REMOTE_MODE,
|
||||
logger,
|
||||
)
|
||||
@@ -138,16 +140,13 @@ def _read_file(file_path: str) -> str:
|
||||
|
||||
|
||||
def get_map_contents() -> list[tuple[str, str]]:
|
||||
"""Read both domains.map and wildcards.map and return combined entries.
|
||||
"""Get all domain-to-backend mappings from SQLite.
|
||||
|
||||
Returns:
|
||||
List of (domain, backend) tuples from both map files
|
||||
List of (domain, backend) tuples including wildcards.
|
||||
"""
|
||||
# Read exact domains
|
||||
entries = _read_map_file(MAP_FILE)
|
||||
# Read wildcards and append
|
||||
entries.extend(_read_map_file(WILDCARDS_MAP_FILE))
|
||||
return entries
|
||||
from .db import db_get_map_contents
|
||||
return db_get_map_contents()
|
||||
|
||||
|
||||
def split_domain_entries(entries: list[tuple[str, str]]) -> tuple[list[tuple[str, str]], list[tuple[str, str]]]:
|
||||
@@ -170,44 +169,21 @@ def split_domain_entries(entries: list[tuple[str, str]]) -> tuple[list[tuple[str
|
||||
|
||||
|
||||
def save_map_file(entries: list[tuple[str, str]]) -> None:
|
||||
"""Save domain-to-backend entries to map files.
|
||||
"""Sync map files from the database.
|
||||
|
||||
Splits entries into two files for 2-stage routing:
|
||||
- domains.map: Exact matches (map_str, O(log n))
|
||||
- wildcards.map: Wildcard entries starting with "." (map_dom, O(n))
|
||||
|
||||
Args:
|
||||
entries: List of (domain, backend) tuples.
|
||||
Regenerates domains.map and wildcards.map from the current
|
||||
database state. The entries parameter is ignored (kept for
|
||||
backward compatibility during transition).
|
||||
|
||||
Raises:
|
||||
IOError: If map files cannot be written.
|
||||
"""
|
||||
# Split into exact and wildcard entries
|
||||
exact_entries, wildcard_entries = split_domain_entries(entries)
|
||||
|
||||
# Save exact domains (for map_str - fast O(log n) lookup)
|
||||
exact_lines = [
|
||||
"# Exact Domain to Backend mapping (for map_str)\n",
|
||||
"# Format: domain backend_name\n",
|
||||
"# Uses ebtree for O(log n) lookup performance\n\n",
|
||||
]
|
||||
for domain, backend in sorted(exact_entries):
|
||||
exact_lines.append(f"{domain} {backend}\n")
|
||||
atomic_write_file(MAP_FILE, "".join(exact_lines))
|
||||
|
||||
# Save wildcards (for map_dom - O(n) but small set)
|
||||
wildcard_lines = [
|
||||
"# Wildcard Domain to Backend mapping (for map_dom)\n",
|
||||
"# Format: .domain.com backend_name (matches *.domain.com)\n",
|
||||
"# Uses map_dom for suffix matching\n\n",
|
||||
]
|
||||
for domain, backend in sorted(wildcard_entries):
|
||||
wildcard_lines.append(f"{domain} {backend}\n")
|
||||
atomic_write_file(WILDCARDS_MAP_FILE, "".join(wildcard_lines))
|
||||
from .db import sync_map_files
|
||||
sync_map_files()
|
||||
|
||||
|
||||
def get_domain_backend(domain: str) -> Optional[str]:
|
||||
"""Look up the backend for a domain from domains.map.
|
||||
"""Look up the backend for a domain from SQLite (O(1)).
|
||||
|
||||
Args:
|
||||
domain: The domain to look up
|
||||
@@ -215,10 +191,8 @@ def get_domain_backend(domain: str) -> Optional[str]:
|
||||
Returns:
|
||||
Backend name if found, None otherwise
|
||||
"""
|
||||
for map_domain, backend in get_map_contents():
|
||||
if map_domain == domain:
|
||||
return backend
|
||||
return None
|
||||
from .db import db_get_domain_backend
|
||||
return db_get_domain_backend(domain)
|
||||
|
||||
|
||||
def is_legacy_backend(backend: str) -> bool:
|
||||
@@ -273,34 +247,17 @@ def get_backend_and_prefix(domain: str) -> tuple[str, str]:
|
||||
|
||||
|
||||
def load_servers_config() -> dict[str, Any]:
|
||||
"""Load servers configuration from JSON file.
|
||||
"""Load servers configuration from SQLite.
|
||||
|
||||
Returns:
|
||||
Dictionary with server configurations
|
||||
Dictionary with server configurations (legacy format compatible).
|
||||
"""
|
||||
try:
|
||||
content = _read_file(SERVERS_FILE)
|
||||
return json.loads(content)
|
||||
except FileNotFoundError:
|
||||
return {}
|
||||
except json.JSONDecodeError as e:
|
||||
logger.warning("Corrupt config file %s: %s", SERVERS_FILE, e)
|
||||
return {}
|
||||
|
||||
|
||||
def save_servers_config(config: dict[str, Any]) -> None:
|
||||
"""Save servers configuration to JSON file atomically.
|
||||
|
||||
Uses temp file + rename for atomic write to prevent race conditions.
|
||||
|
||||
Args:
|
||||
config: Dictionary with server configurations
|
||||
"""
|
||||
atomic_write_file(SERVERS_FILE, json.dumps(config, indent=2))
|
||||
from .db import db_load_servers_config
|
||||
return db_load_servers_config()
|
||||
|
||||
|
||||
def add_server_to_config(domain: str, slot: int, ip: str, http_port: int) -> None:
|
||||
"""Add server configuration to persistent storage with file locking.
|
||||
"""Add server configuration to persistent storage.
|
||||
|
||||
Args:
|
||||
domain: Domain name
|
||||
@@ -308,41 +265,29 @@ def add_server_to_config(domain: str, slot: int, ip: str, http_port: int) -> Non
|
||||
ip: Server IP address
|
||||
http_port: HTTP port
|
||||
"""
|
||||
with file_lock(f"{SERVERS_FILE}.lock"):
|
||||
config = load_servers_config()
|
||||
if domain not in config:
|
||||
config[domain] = {}
|
||||
config[domain][str(slot)] = {"ip": ip, "http_port": http_port}
|
||||
save_servers_config(config)
|
||||
from .db import db_add_server
|
||||
db_add_server(domain, slot, ip, http_port)
|
||||
|
||||
|
||||
def remove_server_from_config(domain: str, slot: int) -> None:
|
||||
"""Remove server configuration from persistent storage with file locking.
|
||||
"""Remove server configuration from persistent storage.
|
||||
|
||||
Args:
|
||||
domain: Domain name
|
||||
slot: Server slot to remove
|
||||
"""
|
||||
with file_lock(f"{SERVERS_FILE}.lock"):
|
||||
config = load_servers_config()
|
||||
if domain in config and str(slot) in config[domain]:
|
||||
del config[domain][str(slot)]
|
||||
if not config[domain]:
|
||||
del config[domain]
|
||||
save_servers_config(config)
|
||||
from .db import db_remove_server
|
||||
db_remove_server(domain, slot)
|
||||
|
||||
|
||||
def remove_domain_from_config(domain: str) -> None:
|
||||
"""Remove domain from persistent config with file locking.
|
||||
"""Remove domain from persistent config (servers + domain entry).
|
||||
|
||||
Args:
|
||||
domain: Domain name to remove
|
||||
"""
|
||||
with file_lock(f"{SERVERS_FILE}.lock"):
|
||||
config = load_servers_config()
|
||||
if domain in config:
|
||||
del config[domain]
|
||||
save_servers_config(config)
|
||||
from .db import db_remove_domain_servers
|
||||
db_remove_domain_servers(domain)
|
||||
|
||||
|
||||
def get_shared_domain(domain: str) -> Optional[str]:
|
||||
@@ -354,9 +299,8 @@ def get_shared_domain(domain: str) -> Optional[str]:
|
||||
Returns:
|
||||
Parent domain name if sharing, None otherwise
|
||||
"""
|
||||
config = load_servers_config()
|
||||
domain_config = config.get(domain, {})
|
||||
return domain_config.get("_shares")
|
||||
from .db import db_get_shared_domain
|
||||
return db_get_shared_domain(domain)
|
||||
|
||||
|
||||
def add_shared_domain_to_config(domain: str, shares_with: str) -> None:
|
||||
@@ -366,10 +310,8 @@ def add_shared_domain_to_config(domain: str, shares_with: str) -> None:
|
||||
domain: New domain name
|
||||
shares_with: Existing domain to share pool with
|
||||
"""
|
||||
with file_lock(f"{SERVERS_FILE}.lock"):
|
||||
config = load_servers_config()
|
||||
config[domain] = {"_shares": shares_with}
|
||||
save_servers_config(config)
|
||||
from .db import db_add_shared_domain
|
||||
db_add_shared_domain(domain, shares_with)
|
||||
|
||||
|
||||
def get_domains_sharing_pool(pool: str) -> list[str]:
|
||||
@@ -381,11 +323,8 @@ def get_domains_sharing_pool(pool: str) -> list[str]:
|
||||
Returns:
|
||||
List of domain names using this pool
|
||||
"""
|
||||
domains = []
|
||||
for domain, backend in get_map_contents():
|
||||
if backend == pool and not domain.startswith("."):
|
||||
domains.append(domain)
|
||||
return domains
|
||||
from .db import db_get_domains_sharing_pool
|
||||
return db_get_domains_sharing_pool(pool)
|
||||
|
||||
|
||||
def is_shared_domain(domain: str) -> bool:
|
||||
@@ -397,37 +336,20 @@ def is_shared_domain(domain: str) -> bool:
|
||||
Returns:
|
||||
True if domain has _shares reference, False otherwise
|
||||
"""
|
||||
config = load_servers_config()
|
||||
domain_config = config.get(domain, {})
|
||||
return "_shares" in domain_config
|
||||
from .db import db_is_shared_domain
|
||||
return db_is_shared_domain(domain)
|
||||
|
||||
|
||||
# Certificate configuration functions
|
||||
|
||||
def load_certs_config() -> list[str]:
|
||||
"""Load certificate domain list from JSON file.
|
||||
"""Load certificate domain list from SQLite.
|
||||
|
||||
Returns:
|
||||
List of domain names
|
||||
Sorted list of domain names.
|
||||
"""
|
||||
try:
|
||||
content = _read_file(CERTS_FILE)
|
||||
data = json.loads(content)
|
||||
return data.get("domains", [])
|
||||
except FileNotFoundError:
|
||||
return []
|
||||
except json.JSONDecodeError as e:
|
||||
logger.warning("Corrupt certificates config %s: %s", CERTS_FILE, e)
|
||||
return []
|
||||
|
||||
|
||||
def save_certs_config(domains: list[str]) -> None:
|
||||
"""Save certificate domain list to JSON file atomically.
|
||||
|
||||
Args:
|
||||
domains: List of domain names
|
||||
"""
|
||||
atomic_write_file(CERTS_FILE, json.dumps({"domains": sorted(domains)}, indent=2))
|
||||
from .db import db_load_certs
|
||||
return db_load_certs()
|
||||
|
||||
|
||||
def add_cert_to_config(domain: str) -> None:
|
||||
@@ -436,11 +358,8 @@ def add_cert_to_config(domain: str) -> None:
|
||||
Args:
|
||||
domain: Domain name to add
|
||||
"""
|
||||
with file_lock(f"{CERTS_FILE}.lock"):
|
||||
domains = load_certs_config()
|
||||
if domain not in domains:
|
||||
domains.append(domain)
|
||||
save_certs_config(domains)
|
||||
from .db import db_add_cert
|
||||
db_add_cert(domain)
|
||||
|
||||
|
||||
def remove_cert_from_config(domain: str) -> None:
|
||||
@@ -449,8 +368,45 @@ def remove_cert_from_config(domain: str) -> None:
|
||||
Args:
|
||||
domain: Domain name to remove
|
||||
"""
|
||||
with file_lock(f"{CERTS_FILE}.lock"):
|
||||
domains = load_certs_config()
|
||||
if domain in domains:
|
||||
domains.remove(domain)
|
||||
save_certs_config(domains)
|
||||
from .db import db_remove_cert
|
||||
db_remove_cert(domain)
|
||||
|
||||
|
||||
# Domain map helper functions (used by domains.py)
|
||||
|
||||
def add_domain_to_map(domain: str, backend: str, is_wildcard: bool = False,
|
||||
shares_with: Optional[str] = None) -> None:
|
||||
"""Add a domain to SQLite and sync map files.
|
||||
|
||||
Args:
|
||||
domain: Domain name (e.g., "example.com").
|
||||
backend: Backend pool name (e.g., "pool_5").
|
||||
is_wildcard: Whether this is a wildcard entry.
|
||||
shares_with: Parent domain if sharing a pool.
|
||||
"""
|
||||
from .db import db_add_domain, sync_map_files
|
||||
db_add_domain(domain, backend, is_wildcard, shares_with)
|
||||
sync_map_files()
|
||||
|
||||
|
||||
def remove_domain_from_map(domain: str) -> None:
|
||||
"""Remove a domain (exact + wildcard) from SQLite and sync map files.
|
||||
|
||||
Args:
|
||||
domain: Base domain name (without leading dot).
|
||||
"""
|
||||
from .db import db_remove_domain, sync_map_files
|
||||
db_remove_domain(domain)
|
||||
sync_map_files()
|
||||
|
||||
|
||||
def find_available_pool() -> Optional[str]:
|
||||
"""Find the first available pool not assigned to any domain.
|
||||
|
||||
Uses SQLite query for O(1) lookup vs previous O(n) list scan.
|
||||
|
||||
Returns:
|
||||
Pool name (e.g., "pool_5") or None if all pools are in use.
|
||||
"""
|
||||
from .db import db_find_available_pool
|
||||
return db_find_available_pool()
|
||||
|
||||
Reference in New Issue
Block a user