Store SQLite DB on remote host via SCP for persistence
Instead of syncing JSON files back, the SQLite DB itself is now the persistent store on the remote HAProxy host:

- Startup: download the remote DB via SCP (skip JSON migration if it exists)
- After writes: upload the local DB via SCP (WAL checkpoint first)
- JSON sync removed (sync_servers_json and sync_certs_json deleted)

New functions:

- ssh_ops: remote_download_file(), remote_upload_file() via SCP
- db: sync_db_to_remote(), _try_download_remote_db()

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -16,6 +16,7 @@ from typing import Any, Optional
|
||||
|
||||
from .config import (
|
||||
DB_FILE,
|
||||
REMOTE_DB_FILE,
|
||||
MAP_FILE,
|
||||
WILDCARDS_MAP_FILE,
|
||||
SERVERS_FILE,
|
||||
@@ -60,14 +61,18 @@ def close_connection() -> None:
|
||||
def init_db() -> None:
|
||||
"""Initialize database schema and run migration if needed.
|
||||
|
||||
Creates tables if they don't exist, then checks for existing
|
||||
JSON/map files to migrate data from.
|
||||
In REMOTE_MODE, tries to download existing DB from the remote host first.
|
||||
If no remote DB exists, creates a new one and migrates from JSON files.
|
||||
"""
|
||||
# Ensure parent directory exists for the database file
|
||||
db_dir = os.path.dirname(DB_FILE)
|
||||
if db_dir:
|
||||
os.makedirs(db_dir, exist_ok=True)
|
||||
|
||||
# In REMOTE_MODE, try to restore DB from remote host
|
||||
if REMOTE_MODE:
|
||||
_try_download_remote_db()
|
||||
|
||||
conn = get_connection()
|
||||
cur = conn.cursor()
|
||||
|
||||
@@ -123,10 +128,30 @@ def init_db() -> None:
|
||||
migrate_from_json()
|
||||
cur.execute("INSERT INTO schema_version (version) VALUES (?)", (SCHEMA_VERSION,))
|
||||
conn.commit()
|
||||
# Upload newly created DB to remote for persistence
|
||||
if REMOTE_MODE:
|
||||
sync_db_to_remote()
|
||||
|
||||
logger.info("Database initialized (schema v%d)", SCHEMA_VERSION)
|
||||
|
||||
|
||||
def _try_download_remote_db() -> None:
    """Try to download existing DB from remote host.

    If the remote DB exists, downloads it to the local DB_FILE path.
    If not, does nothing (init_db will create a fresh DB).
    """
    # Imported lazily to avoid a circular import between db and ssh_ops.
    from .ssh_ops import remote_download_file, remote_file_exists

    # Nothing on the remote side: init_db will create a fresh database.
    if not remote_file_exists(REMOTE_DB_FILE):
        logger.info("No remote DB found at %s, will create new", REMOTE_DB_FILE)
        return

    if remote_download_file(REMOTE_DB_FILE, DB_FILE):
        logger.info("Downloaded remote DB from %s", REMOTE_DB_FILE)
    else:
        # Best effort only: a failed download falls back to a new local DB.
        logger.warning("Failed to download remote DB, will create new")
|
||||
|
||||
|
||||
def migrate_from_json() -> None:
|
||||
"""Migrate data from JSON/map files to SQLite.
|
||||
|
||||
@@ -582,29 +607,23 @@ def sync_map_files() -> None:
|
||||
len(exact_entries), len(wildcard_entries))
|
||||
|
||||
|
||||
def sync_db_to_remote() -> None:
    """Upload local SQLite DB to remote host for persistence.

    Checkpoints WAL first to merge all changes into the main DB file,
    then uploads via SCP. No-op in local (non-remote) mode.

    Best-effort: failures are logged as warnings and never propagated,
    so a broken remote link cannot break the local write path.
    """
    import sqlite3

    if not REMOTE_MODE:
        return

    # Imported lazily to avoid a circular import between db and ssh_ops.
    from .ssh_ops import remote_upload_file

    try:
        # Merge WAL into the main DB file before upload, so the single
        # uploaded file contains every committed change.
        conn = get_connection()
        conn.execute("PRAGMA wal_checkpoint(TRUNCATE)")

        # remote_upload_file is presumed to return False on SCP failure,
        # mirroring remote_download_file — surface that instead of
        # silently treating a failed upload as success. TODO confirm
        # against ssh_ops.
        if remote_upload_file(DB_FILE, REMOTE_DB_FILE):
            logger.debug("Synced DB to remote: %s", REMOTE_DB_FILE)
        else:
            logger.warning("Failed to sync DB to remote: %s", REMOTE_DB_FILE)
    except (sqlite3.Error, IOError, OSError) as e:
        # sqlite3.Error covers e.g. OperationalError if the checkpoint
        # hits a busy database; IOError/OSError cover transport issues.
        logger.warning("Failed to sync DB to remote: %s", e)
|
||||
|
||||
Reference in New Issue
Block a user