Store SQLite DB on remote host via SCP for persistence
Instead of syncing JSON files back, the SQLite DB itself is now the persistent store on the remote HAProxy host:

- Startup: download remote DB via SCP (skip migration if it exists)
- After writes: upload local DB via SCP (WAL checkpoint first)
- JSON sync removed (sync_servers_json, sync_certs_json deleted)

New functions:

- ssh_ops: remote_download_file(), remote_upload_file() via SCP
- db: sync_db_to_remote(), _try_download_remote_db()

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -32,6 +32,7 @@ WILDCARDS_MAP_FILE_CONTAINER: str = os.getenv("HAPROXY_WILDCARDS_MAP_FILE_CONTAI
|
||||
SERVERS_FILE: str = os.getenv("HAPROXY_SERVERS_FILE", "/opt/haproxy/conf/servers.json")
|
||||
CERTS_FILE: str = os.getenv("HAPROXY_CERTS_FILE", "/opt/haproxy/conf/certificates.json")
|
||||
DB_FILE: str = os.getenv("HAPROXY_DB_FILE", "/opt/haproxy/conf/haproxy_mcp.db")
|
||||
REMOTE_DB_FILE: str = os.getenv("HAPROXY_REMOTE_DB_FILE", "/opt/haproxy/conf/haproxy_mcp.db")
|
||||
|
||||
# Certificate paths
|
||||
CERTS_DIR: str = os.getenv("HAPROXY_CERTS_DIR", "/opt/haproxy/certs")
|
||||
|
||||
@@ -16,6 +16,7 @@ from typing import Any, Optional
|
||||
|
||||
from .config import (
|
||||
DB_FILE,
|
||||
REMOTE_DB_FILE,
|
||||
MAP_FILE,
|
||||
WILDCARDS_MAP_FILE,
|
||||
SERVERS_FILE,
|
||||
@@ -60,14 +61,18 @@ def close_connection() -> None:
|
||||
def init_db() -> None:
|
||||
"""Initialize database schema and run migration if needed.
|
||||
|
||||
Creates tables if they don't exist, then checks for existing
|
||||
JSON/map files to migrate data from.
|
||||
In REMOTE_MODE, tries to download existing DB from the remote host first.
|
||||
If no remote DB exists, creates a new one and migrates from JSON files.
|
||||
"""
|
||||
# Ensure parent directory exists for the database file
|
||||
db_dir = os.path.dirname(DB_FILE)
|
||||
if db_dir:
|
||||
os.makedirs(db_dir, exist_ok=True)
|
||||
|
||||
# In REMOTE_MODE, try to restore DB from remote host
|
||||
if REMOTE_MODE:
|
||||
_try_download_remote_db()
|
||||
|
||||
conn = get_connection()
|
||||
cur = conn.cursor()
|
||||
|
||||
@@ -123,10 +128,30 @@ def init_db() -> None:
|
||||
migrate_from_json()
|
||||
cur.execute("INSERT INTO schema_version (version) VALUES (?)", (SCHEMA_VERSION,))
|
||||
conn.commit()
|
||||
# Upload newly created DB to remote for persistence
|
||||
if REMOTE_MODE:
|
||||
sync_db_to_remote()
|
||||
|
||||
logger.info("Database initialized (schema v%d)", SCHEMA_VERSION)
|
||||
|
||||
|
||||
def _try_download_remote_db() -> None:
    """Attempt to seed the local DB from the remote host.

    When a DB file exists at REMOTE_DB_FILE on the remote host, it is
    downloaded over SCP to the local DB_FILE path. When it does not
    exist (or the download fails), nothing is written and init_db()
    proceeds to create a fresh database.
    """
    # Imported lazily to avoid a circular import between db and ssh_ops.
    from .ssh_ops import remote_download_file, remote_file_exists

    if not remote_file_exists(REMOTE_DB_FILE):
        logger.info("No remote DB found at %s, will create new", REMOTE_DB_FILE)
        return

    if remote_download_file(REMOTE_DB_FILE, DB_FILE):
        logger.info("Downloaded remote DB from %s", REMOTE_DB_FILE)
    else:
        logger.warning("Failed to download remote DB, will create new")
|
||||
|
||||
|
||||
def migrate_from_json() -> None:
|
||||
"""Migrate data from JSON/map files to SQLite.
|
||||
|
||||
@@ -582,29 +607,23 @@ def sync_map_files() -> None:
|
||||
len(exact_entries), len(wildcard_entries))
|
||||
|
||||
|
||||
def sync_servers_json() -> None:
|
||||
"""Write servers configuration back to servers.json for persistence.
|
||||
def sync_db_to_remote() -> None:
|
||||
"""Upload local SQLite DB to remote host for persistence.
|
||||
|
||||
Ensures the remote JSON file stays in sync with SQLite so that
|
||||
pod restarts can re-migrate without data loss.
|
||||
Checkpoints WAL first to merge all changes into the main DB file,
|
||||
then uploads via SCP. No-op in local (non-remote) mode.
|
||||
"""
|
||||
from .file_ops import atomic_write_file
|
||||
if not REMOTE_MODE:
|
||||
return
|
||||
|
||||
config = db_load_servers_config()
|
||||
content = json.dumps(config, indent=2)
|
||||
atomic_write_file(SERVERS_FILE, content)
|
||||
logger.debug("Synced servers.json: %d domains", len(config))
|
||||
from .ssh_ops import remote_upload_file
|
||||
|
||||
try:
|
||||
# Merge WAL into main DB file before upload
|
||||
conn = get_connection()
|
||||
conn.execute("PRAGMA wal_checkpoint(TRUNCATE)")
|
||||
|
||||
def sync_certs_json() -> None:
|
||||
"""Write certificates list back to certificates.json for persistence.
|
||||
|
||||
Ensures the remote JSON file stays in sync with SQLite so that
|
||||
pod restarts can re-migrate without data loss.
|
||||
"""
|
||||
from .file_ops import atomic_write_file
|
||||
|
||||
domains = db_load_certs()
|
||||
content = json.dumps({"domains": domains}, indent=2)
|
||||
atomic_write_file(CERTS_FILE, content)
|
||||
logger.debug("Synced certificates.json: %d domains", len(domains))
|
||||
remote_upload_file(DB_FILE, REMOTE_DB_FILE)
|
||||
logger.debug("Synced DB to remote: %s", REMOTE_DB_FILE)
|
||||
except (IOError, OSError) as e:
|
||||
logger.warning("Failed to sync DB to remote: %s", e)
|
||||
|
||||
@@ -265,9 +265,9 @@ def add_server_to_config(domain: str, slot: int, ip: str, http_port: int) -> Non
|
||||
ip: Server IP address
|
||||
http_port: HTTP port
|
||||
"""
|
||||
from .db import db_add_server, sync_servers_json
|
||||
from .db import db_add_server, sync_db_to_remote
|
||||
db_add_server(domain, slot, ip, http_port)
|
||||
sync_servers_json()
|
||||
sync_db_to_remote()
|
||||
|
||||
|
||||
def remove_server_from_config(domain: str, slot: int) -> None:
|
||||
@@ -277,9 +277,9 @@ def remove_server_from_config(domain: str, slot: int) -> None:
|
||||
domain: Domain name
|
||||
slot: Server slot to remove
|
||||
"""
|
||||
from .db import db_remove_server, sync_servers_json
|
||||
from .db import db_remove_server, sync_db_to_remote
|
||||
db_remove_server(domain, slot)
|
||||
sync_servers_json()
|
||||
sync_db_to_remote()
|
||||
|
||||
|
||||
def remove_domain_from_config(domain: str) -> None:
|
||||
@@ -288,9 +288,9 @@ def remove_domain_from_config(domain: str) -> None:
|
||||
Args:
|
||||
domain: Domain name to remove
|
||||
"""
|
||||
from .db import db_remove_domain_servers, sync_servers_json
|
||||
from .db import db_remove_domain_servers, sync_db_to_remote
|
||||
db_remove_domain_servers(domain)
|
||||
sync_servers_json()
|
||||
sync_db_to_remote()
|
||||
|
||||
|
||||
def get_shared_domain(domain: str) -> Optional[str]:
|
||||
@@ -313,9 +313,9 @@ def add_shared_domain_to_config(domain: str, shares_with: str) -> None:
|
||||
domain: New domain name
|
||||
shares_with: Existing domain to share pool with
|
||||
"""
|
||||
from .db import db_add_shared_domain, sync_servers_json
|
||||
from .db import db_add_shared_domain, sync_db_to_remote
|
||||
db_add_shared_domain(domain, shares_with)
|
||||
sync_servers_json()
|
||||
sync_db_to_remote()
|
||||
|
||||
|
||||
def get_domains_sharing_pool(pool: str) -> list[str]:
|
||||
@@ -362,9 +362,9 @@ def add_cert_to_config(domain: str) -> None:
|
||||
Args:
|
||||
domain: Domain name to add
|
||||
"""
|
||||
from .db import db_add_cert, sync_certs_json
|
||||
from .db import db_add_cert, sync_db_to_remote
|
||||
db_add_cert(domain)
|
||||
sync_certs_json()
|
||||
sync_db_to_remote()
|
||||
|
||||
|
||||
def remove_cert_from_config(domain: str) -> None:
|
||||
@@ -373,9 +373,9 @@ def remove_cert_from_config(domain: str) -> None:
|
||||
Args:
|
||||
domain: Domain name to remove
|
||||
"""
|
||||
from .db import db_remove_cert, sync_certs_json
|
||||
from .db import db_remove_cert, sync_db_to_remote
|
||||
db_remove_cert(domain)
|
||||
sync_certs_json()
|
||||
sync_db_to_remote()
|
||||
|
||||
|
||||
# Domain map helper functions (used by domains.py)
|
||||
|
||||
@@ -125,6 +125,58 @@ def remote_file_exists(path: str) -> bool:
|
||||
return result.stdout.strip() == "yes"
|
||||
|
||||
|
||||
def _scp_base_cmd() -> list[str]:
    """Build the common SCP invocation (options, port, optional key).

    Host-key checking is disabled and BatchMode is on so transfers
    never block on an interactive prompt; ConnectTimeout bounds the
    time spent reaching an unresponsive host.
    """
    base: list[str] = ["scp"]
    # dicts preserve insertion order (3.7+), so the emitted option
    # order matches the hand-written list this replaces.
    options = {
        "StrictHostKeyChecking": "no",
        "UserKnownHostsFile": "/dev/null",
        "LogLevel": "ERROR",
        "BatchMode": "yes",
        "ConnectTimeout": "10",
    }
    for name, value in options.items():
        base += ["-o", f"{name}={value}"]
    base += ["-P", str(SSH_PORT)]
    if SSH_KEY:
        base += ["-i", SSH_KEY]
    return base
||||
|
||||
|
||||
def remote_download_file(remote_path: str, local_path: str) -> bool:
    """Download a binary file from the remote host via SCP.

    Args:
        remote_path: Absolute file path on remote host
        local_path: Absolute local file path to write to

    Returns:
        True if downloaded successfully; False if the file doesn't
        exist, the transfer fails, or SCP exceeds the timeout.
    """
    cmd = _scp_base_cmd() + [f"{SSH_USER}@{SSH_HOST}:{remote_path}", local_path]
    logger.debug("SCP download: %s -> %s", remote_path, local_path)
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=SUBPROCESS_TIMEOUT)
    except subprocess.TimeoutExpired:
        # Fix: a hung transfer previously propagated TimeoutExpired up to
        # callers such as _try_download_remote_db()/init_db(), crashing
        # startup. The documented contract is boolean, so treat a timeout
        # as a failed download.
        logger.debug("SCP download timed out after %ss: %s", SUBPROCESS_TIMEOUT, remote_path)
        return False
    if result.returncode != 0:
        logger.debug("SCP download failed: %s", result.stderr.strip())
        return False
    return True
|
||||
|
||||
|
||||
def remote_upload_file(local_path: str, remote_path: str) -> None:
    """Upload a binary file to the remote host via SCP.

    Args:
        local_path: Absolute local file path to upload
        remote_path: Absolute file path on remote host

    Raises:
        IOError: If the upload fails or times out.
    """
    cmd = _scp_base_cmd() + [local_path, f"{SSH_USER}@{SSH_HOST}:{remote_path}"]
    logger.debug("SCP upload: %s -> %s", local_path, remote_path)
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=SUBPROCESS_TIMEOUT)
    except subprocess.TimeoutExpired as exc:
        # Fix: TimeoutExpired is not an OSError, so it escaped the
        # `except (IOError, OSError)` handler in sync_db_to_remote().
        # Normalize it to the IOError this function documents.
        raise IOError(f"SCP upload timed out after {SUBPROCESS_TIMEOUT}s") from exc
    if result.returncode != 0:
        raise IOError(f"SCP upload failed: {result.stderr.strip()}")
|
||||
|
||||
|
||||
def run_command(args: list[str], timeout: int = SUBPROCESS_TIMEOUT) -> subprocess.CompletedProcess:
|
||||
"""Execute a command locally or remotely based on REMOTE_MODE.
|
||||
|
||||
|
||||
@@ -296,6 +296,7 @@ def patch_config_paths(temp_config_dir):
|
||||
SERVERS_FILE=temp_config_dir["servers_file"],
|
||||
CERTS_FILE=temp_config_dir["certs_file"],
|
||||
DB_FILE=temp_config_dir["db_file"],
|
||||
REMOTE_DB_FILE=temp_config_dir["db_file"],
|
||||
):
|
||||
# Patch health module which imports MAP_FILE and DB_FILE
|
||||
with patch.multiple(
|
||||
|
||||
Reference in New Issue
Block a user