refactor: migrate data storage from JSON/map files to SQLite
Replace servers.json, certificates.json, and map-file parsing with SQLite
(WAL mode) as the single source of truth. HAProxy map files are now
generated from SQLite via sync_map_files().

Key changes:
- Add db.py with schema, connection management, and JSON migration
- Add DB_FILE config constant
- Delegate file_ops.py functions to db.py
- Refactor domains.py to use file_ops instead of direct list manipulation
- Fix subprocess.TimeoutExpired not caught (it does not inherit from TimeoutError)
- Add DB health check in health.py
- Init DB on startup in server.py and __main__.py
- Update all 359 tests to use SQLite-backed functions

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -10,6 +10,7 @@ from pydantic import Field
 from ..config import (
     MAP_FILE,
     SERVERS_FILE,
+    DB_FILE,
     HAPROXY_CONTAINER,
 )
 from ..exceptions import HaproxyError
@@ -88,7 +89,7 @@ def register_health_tools(mcp):
     # Check configuration files
     files_ok = True
     file_status: dict[str, str] = {}
-    for name, path in [("map_file", MAP_FILE), ("servers_file", SERVERS_FILE)]:
+    for name, path in [("map_file", MAP_FILE), ("db_file", DB_FILE)]:
         exists = remote_file_exists(path) if REMOTE_MODE else __import__('os').path.exists(path)
         if exists:
             file_status[name] = "ok"
|
||||
Reference in New Issue
Block a user