perf: Implement 2-stage map routing for faster domain lookup
Split domain routing into two stages for improved performance: - Stage 1: map_str for exact domains (O(log n) using ebtree) - Stage 2: map_dom for wildcards only (O(n) but small set) Wildcards now stored in separate wildcards.map file. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -26,6 +26,9 @@ HAPROXY_SOCKET: tuple[str, int] = (HAPROXY_HOST, HAPROXY_PORT)
|
||||
STATE_FILE: str = os.getenv("HAPROXY_STATE_FILE", "/opt/haproxy/data/servers.state")
|
||||
MAP_FILE: str = os.getenv("HAPROXY_MAP_FILE", "/opt/haproxy/conf/domains.map")
|
||||
MAP_FILE_CONTAINER: str = os.getenv("HAPROXY_MAP_FILE_CONTAINER", "/usr/local/etc/haproxy/domains.map")
|
||||
# Wildcards map for 2-stage matching (map_dom fallback)
|
||||
WILDCARDS_MAP_FILE: str = os.getenv("HAPROXY_WILDCARDS_MAP_FILE", "/opt/haproxy/conf/wildcards.map")
|
||||
WILDCARDS_MAP_FILE_CONTAINER: str = os.getenv("HAPROXY_WILDCARDS_MAP_FILE_CONTAINER", "/usr/local/etc/haproxy/wildcards.map")
|
||||
SERVERS_FILE: str = os.getenv("HAPROXY_SERVERS_FILE", "/opt/haproxy/conf/servers.json")
|
||||
CERTS_FILE: str = os.getenv("HAPROXY_CERTS_FILE", "/opt/haproxy/conf/certificates.json")
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import Any, Optional
|
||||
|
||||
from .config import (
|
||||
MAP_FILE,
|
||||
WILDCARDS_MAP_FILE,
|
||||
SERVERS_FILE,
|
||||
CERTS_FILE,
|
||||
logger,
|
||||
@@ -50,15 +51,18 @@ def atomic_write_file(file_path: str, content: str) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def get_map_contents() -> list[tuple[str, str]]:
|
||||
"""Read domains.map file and return list of (domain, backend) tuples.
|
||||
def _read_map_file(file_path: str) -> list[tuple[str, str]]:
|
||||
"""Read a single map file and return list of (domain, backend) tuples.
|
||||
|
||||
Args:
|
||||
file_path: Path to the map file
|
||||
|
||||
Returns:
|
||||
List of (domain, backend) tuples from the map file
|
||||
"""
|
||||
entries = []
|
||||
try:
|
||||
with open(MAP_FILE, "r", encoding="utf-8") as f:
|
||||
with open(file_path, "r", encoding="utf-8") as f:
|
||||
try:
|
||||
fcntl.flock(f.fileno(), fcntl.LOCK_SH)
|
||||
except OSError:
|
||||
@@ -81,10 +85,44 @@ def get_map_contents() -> list[tuple[str, str]]:
|
||||
return entries
|
||||
|
||||
|
||||
def save_map_file(entries: list[tuple[str, str]]) -> None:
|
||||
"""Save entries to domains.map file atomically.
|
||||
def get_map_contents() -> list[tuple[str, str]]:
    """Read both domains.map and wildcards.map and return combined entries.

    Exact-domain entries (domains.map) come first, followed by the
    wildcard entries (wildcards.map), mirroring the 2-stage lookup order.

    Returns:
        List of (domain, backend) tuples from both map files
    """
    # Concatenate the exact-domain map with the wildcard map in one step.
    return _read_map_file(MAP_FILE) + _read_map_file(WILDCARDS_MAP_FILE)
|
||||
|
||||
|
||||
def split_domain_entries(entries: list[tuple[str, str]]) -> tuple[list[tuple[str, str]], list[tuple[str, str]]]:
    """Partition map entries into exact-domain and wildcard groups.

    A wildcard entry is any domain beginning with "." (HAProxy's
    suffix-match convention: ".example.com" matches *.example.com).

    Args:
        entries: List of (domain, backend) tuples

    Returns:
        Tuple of (exact_entries, wildcard_entries), each preserving the
        relative order of *entries*
    """
    wildcards = [pair for pair in entries if pair[0].startswith(".")]
    exact = [pair for pair in entries if not pair[0].startswith(".")]
    return exact, wildcards
|
||||
|
||||
|
||||
def save_map_file(entries: list[tuple[str, str]]) -> None:
    """Save entries to separate map files for 2-stage matching.

    Uses 2-stage matching for performance:
    - domains.map: Exact domain matches (used with map_str, O(log n))
    - wildcards.map: Wildcard entries (used with map_dom, O(n))

    Args:
        entries: List of (domain, backend) tuples to write

    Raises:
        IOError: If the file cannot be written
    """
    def _render(header: list[str], pairs: list[tuple[str, str]]) -> str:
        # Header comment lines followed by one "domain backend" line per
        # entry, sorted for deterministic file contents.
        return "".join(header + [f"{domain} {backend}\n" for domain, backend in sorted(pairs)])

    # Split into exact and wildcard entries
    exact_entries, wildcard_entries = split_domain_entries(entries)

    # Save exact domains (for map_str - fast O(log n) lookup)
    atomic_write_file(
        MAP_FILE,
        _render(
            [
                "# Exact Domain to Backend mapping (for map_str)\n",
                "# Format: domain backend_name\n",
                "# Uses ebtree for O(log n) lookup performance\n\n",
            ],
            exact_entries,
        ),
    )

    # Save wildcards (for map_dom - O(n) but small set)
    atomic_write_file(
        WILDCARDS_MAP_FILE,
        _render(
            [
                "# Wildcard Domain to Backend mapping (for map_dom)\n",
                "# Format: .domain.com backend_name (matches *.domain.com)\n",
                "# Uses map_dom for suffix matching\n\n",
            ],
            wildcard_entries,
        ),
    )
|
||||
|
||||
|
||||
def get_domain_backend(domain: str) -> Optional[str]:
|
||||
|
||||
@@ -10,6 +10,7 @@ from pydantic import Field
|
||||
from ..config import (
|
||||
MAP_FILE,
|
||||
MAP_FILE_CONTAINER,
|
||||
WILDCARDS_MAP_FILE_CONTAINER,
|
||||
POOL_COUNT,
|
||||
MAX_SLOTS,
|
||||
StateField,
|
||||
@@ -199,11 +200,12 @@ def register_domain_tools(mcp):
|
||||
except IOError as e:
|
||||
return f"Error: Failed to save map file: {e}"
|
||||
|
||||
# Then update HAProxy map via Runtime API
|
||||
# Then update HAProxy maps via Runtime API
|
||||
# 2-stage matching: exact domains go to domains.map, wildcards go to wildcards.map
|
||||
try:
|
||||
haproxy_cmd(f"add map {MAP_FILE_CONTAINER} {domain} {pool}")
|
||||
if not is_subdomain:
|
||||
haproxy_cmd(f"add map {MAP_FILE_CONTAINER} .{domain} {pool}")
|
||||
haproxy_cmd(f"add map {WILDCARDS_MAP_FILE_CONTAINER} .{domain} {pool}")
|
||||
except HaproxyError as e:
|
||||
# Rollback: remove the domain we just added from entries and re-save
|
||||
rollback_entries = [(d, b) for d, b in entries if d != domain and d != f".{domain}"]
|
||||
@@ -276,9 +278,10 @@ def register_domain_tools(mcp):
|
||||
remove_domain_from_config(domain)
|
||||
|
||||
# Clear map entries via Runtime API (immediate effect)
|
||||
# 2-stage matching: exact from domains.map, wildcard from wildcards.map
|
||||
haproxy_cmd(f"del map {MAP_FILE_CONTAINER} {domain}")
|
||||
try:
|
||||
haproxy_cmd(f"del map {MAP_FILE_CONTAINER} .{domain}")
|
||||
haproxy_cmd(f"del map {WILDCARDS_MAP_FILE_CONTAINER} .{domain}")
|
||||
except HaproxyError as e:
|
||||
logger.warning("Failed to remove wildcard entry for %s: %s", domain, e)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user