Add infra-tool: infrastructure registry with Incus container deployment
Service registry & discovery system that aggregates infrastructure metadata from Incus, K8s, APISIX, and BunnyCDN into NocoDB. Includes FastAPI HTTP API, systemd timer for 15-min auto-sync, and dual-mode collectors (REST API for container deployment, CLI/SSH fallback for local use). Deployed to jp1:infra-tool with Tailscale socket proxy for host network visibility. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
101
collectors/apisix.py
Normal file
101
collectors/apisix.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Collect APISIX routes and upstreams via Admin API."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import requests
|
||||
|
||||
import config
|
||||
|
||||
|
||||
def _get(path: str) -> dict:
    """GET a resource from the APISIX Admin API and return the parsed JSON body.

    Args:
        path: Admin API sub-path beginning with "/" (e.g. "/routes").

    Returns:
        The decoded JSON response as a dict.

    Raises:
        requests.HTTPError: on a non-2xx Admin API response.
        requests.Timeout: if APISIX does not respond within the timeout.
    """
    url = f"{config.APISIX_ADMIN_URL}/apisix/admin{path}"
    # A timeout is mandatory: without one, requests can block indefinitely if
    # APISIX hangs, which would wedge the 15-minute systemd sync timer.
    resp = requests.get(
        url,
        headers={"X-API-KEY": config.apisix_admin_key()},
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
|
||||
def _upstream_map() -> dict[str, dict]:
    """Return a mapping of upstream ID (stringified) to its upstream object."""
    payload = _get("/upstreams")
    # Each list entry may be wrapped as {"value": {...}} or be a bare object;
    # unwrap before keying by id. Later duplicates overwrite earlier ones.
    return {
        str(upstream.get("id", "")): upstream
        for upstream in (entry.get("value", entry) for entry in payload.get("list", []))
    }
|
||||
|
||||
|
||||
def collect_routes() -> list[dict]:
    """Return list of route records for NocoDB infra_routes."""
    payload = _get("/routes")
    upstream_by_id = _upstream_map()
    synced_at = datetime.now(timezone.utc).isoformat()
    records = []

    for entry in payload.get("list", []):
        # Entries may be wrapped as {"value": {...}} or be bare route objects.
        route = entry.get("value", entry)
        route_id = str(route.get("id", ""))
        name = route.get("name", route_id)

        # Plural "hosts" takes precedence; fall back to singular "host".
        raw_hosts = route.get("hosts", route.get("host", []))
        if isinstance(raw_hosts, str):
            raw_hosts = [raw_hosts]
        host = ", ".join(raw_hosts) if raw_hosts else ""

        # Same singular/plural dance for the matched URI(s).
        uri = route.get("uri", route.get("uris", ""))
        if isinstance(uri, list):
            uri = ", ".join(uri)

        # Resolve upstream nodes: an inline upstream object wins over an
        # upstream_id reference into the shared upstream table.
        nodes = {}
        if route.get("upstream"):
            nodes = route["upstream"].get("nodes", {})
        elif route.get("upstream_id"):
            referenced = upstream_by_id.get(str(route["upstream_id"]), {})
            nodes = referenced.get("nodes", {})

        # Nodes can be a list of {host, port, weight} dicts; normalize to the
        # {"host:port": weight} mapping form.
        if isinstance(nodes, list):
            nodes = {f'{n["host"]}:{n.get("port",80)}': n.get("weight", 1) for n in nodes}

        plugin_names = list(route.get("plugins", {}).keys())

        records.append({
            "Title": name or route_id,
            "host": host,
            "uri": uri,
            "upstream_nodes": json.dumps(nodes) if nodes else "",
            "plugins": ", ".join(plugin_names),
            "last_synced": synced_at,
        })
    return records
|
||||
|
||||
|
||||
def collect_services() -> list[dict]:
    """Derive infra_services entries from APISIX routes (gateway layer)."""
    seen_at = datetime.now(timezone.utc).isoformat()
    records = []

    for route in collect_routes():
        # Routes without any host cannot be mapped to a service domain.
        if not route["host"]:
            continue
        # Use the first host of a multi-host route as the service identity.
        primary_host = route["host"].split(",")[0].strip()

        records.append({
            "Title": primary_host,
            "display_name": route["Title"],
            "domain": primary_host,
            "source": "apisix",
            "layer": "gateway",
            "status": "up",
            "upstream_ip": route["upstream_nodes"],
            "cluster": "apisix-osaka",
            "last_seen": seen_at,
        })
    return records
|
||||
Reference in New Issue
Block a user