Replace CSV traffic log with SQLite for better performance

- traffic_log.csv → traffic_log.db (SQLite with indexed timestamp)
- INSERT instead of CSV append, DELETE instead of file rewrite
- CLI queries use SQL (GROUP BY for traffic, LIMIT for log)
- retrain_from_log() uses read-only connection with time range query
- Config key: traffic_log_file → traffic_log_db

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
kaffa
2026-02-07 10:30:10 +09:00
parent 11c1ab0134
commit 3d1e353b1a
3 changed files with 147 additions and 132 deletions

View File

@@ -802,47 +802,41 @@ cmd_ai_retrain() {
}
cmd_ai_traffic() {
local log_file
log_file=$(python3 -c "
local db_file
db_file=$(python3 -c "
import yaml
with open('$CONFIG_FILE') as f:
cfg = yaml.safe_load(f)
print(cfg.get('ai',{}).get('traffic_log_file', '/var/lib/xdp-defense/traffic_log.csv'))
" 2>/dev/null || echo "/var/lib/xdp-defense/traffic_log.csv")
print(cfg.get('ai',{}).get('traffic_log_db', '/var/lib/xdp-defense/traffic_log.db'))
" 2>/dev/null || echo "/var/lib/xdp-defense/traffic_log.db")
[ ! -f "$log_file" ] && { log_err "Traffic log not found: $log_file"; exit 1; }
[ ! -f "$db_file" ] && { log_err "Traffic log not found: $db_file"; exit 1; }
python3 -c "
import csv, sys
import sqlite3, sys
from datetime import datetime, timedelta
log_file = sys.argv[1]
cutoff = datetime.now() - timedelta(hours=24)
db_file = sys.argv[1]
cutoff = (datetime.now() - timedelta(hours=24)).isoformat()
conn = sqlite3.connect(db_file)
cur = conn.cursor()
# Buckets: 0-6, 6-12, 12-18, 18-24
buckets = {0: [], 1: [], 2: [], 3: []}
total_samples = 0
with open(log_file, 'r') as f:
reader = csv.reader(f)
header = next(reader, None)
if header is None:
print('Traffic log is empty')
sys.exit(0)
for row in reader:
try:
ts = datetime.fromisoformat(row[0])
if ts < cutoff:
continue
hour = float(row[1])
bucket = min(int(hour // 6), 3)
# features: row[2]=hour_sin, row[3]=hour_cos, row[4]=total_packets, row[5]=total_bytes, ...
pps = float(row[4])
bps = float(row[5])
buckets[bucket].append((pps, bps))
total_samples += 1
except (ValueError, IndexError):
continue
cur.execute('SELECT hour, total_packets, total_bytes FROM traffic_samples WHERE timestamp >= ?', (cutoff,))
for row in cur.fetchall():
try:
hour = float(row[0])
bucket = min(int(hour // 6), 3)
pps = float(row[1])
bps = float(row[2])
buckets[bucket].append((pps, bps))
total_samples += 1
except (ValueError, TypeError):
continue
labels = ['00:00-06:00', '06:00-12:00', '12:00-18:00', '18:00-24:00']
print()
@@ -873,6 +867,8 @@ for i, label in enumerate(labels):
hours = total_samples * 5 / 3600 # 5s intervals
print(f'Total: {total_samples} samples ({hours:.1f}h)')
conn.close()
# Show next retrain time
import yaml, os, time
try:
@@ -894,62 +890,67 @@ try:
except:
pass
print()
" "$log_file"
" "$db_file"
}
# Show the last N traffic-log samples from the SQLite traffic DB.
# Usage: cmd_ai_log [N]   (default 20; non-numeric arguments fall back to 20)
# Reads the DB path from $CONFIG_FILE (ai.traffic_log_db) with a hard-coded default.
cmd_ai_log() {
    local n=${1:-20}
    # Guard: only accept a plain non-negative integer; otherwise use the default.
    [[ "$n" =~ ^[0-9]+$ ]] || n=20
    local db_file
    # Resolve the traffic-log DB path from the YAML config; fall back to the
    # default path if the config is missing or unreadable.
    db_file=$(python3 -c "
import yaml
with open('$CONFIG_FILE') as f:
    cfg = yaml.safe_load(f)
print(cfg.get('ai',{}).get('traffic_log_db', '/var/lib/xdp-defense/traffic_log.db'))
" 2>/dev/null || echo "/var/lib/xdp-defense/traffic_log.db")
    [ ! -f "$db_file" ] && { log_err "Traffic log not found: $db_file"; exit 1; }
    python3 -c "
import sqlite3, sys
db_file = sys.argv[1]
n = int(sys.argv[2])
# Open read-only (URI mode): a display command must never create the DB,
# alter its schema, or take write locks against the logger process.
conn = sqlite3.connect(f'file:{db_file}?mode=ro', uri=True)
cur = conn.cursor()
cur.execute('SELECT COUNT(*) FROM traffic_samples')
total_count = cur.fetchone()[0]
if total_count == 0:
    print('Traffic log is empty')
    conn.close()
    sys.exit(0)
# Fetch the newest N rows, then reverse so the table prints oldest-first.
cur.execute('SELECT timestamp, hour, total_packets, total_bytes, syn_ratio, udp_ratio, icmp_ratio FROM traffic_samples ORDER BY id DESC LIMIT ?', (n,))
rows = cur.fetchall()
rows.reverse()
conn.close()
print()
print('\033[1m=== Recent Traffic Log ===\033[0m')
print(f'{\"Timestamp\":>22} {\"Hour\":>6} {\"PPS\":>10} {\"Bytes\":>12} {\"SYN%\":>6} {\"UDP%\":>6} {\"ICMP%\":>6}')
print(f'{\"-\"*22} {\"-\"*6} {\"-\"*10} {\"-\"*12} {\"-\"*6} {\"-\"*6} {\"-\"*6}')
for row in rows:
    try:
        ts = str(row[0])[:19]  # trim microseconds
        hour = float(row[1])
        pkts = float(row[2])
        bts = float(row[3])
        # Ratio columns may be NULL in older rows; render those as 0%.
        syn_r = float(row[4]) * 100 if row[4] is not None else 0
        udp_r = float(row[5]) * 100 if row[5] is not None else 0
        icmp_r = float(row[6]) * 100 if row[6] is not None else 0
        print(f'{ts:>22} {hour:>6.1f} {pkts:>10.0f} {bts:>12.0f} {syn_r:>5.1f}% {udp_r:>5.1f}% {icmp_r:>5.1f}%')
    except (ValueError, TypeError):
        # Skip rows with malformed numeric data rather than aborting the listing.
        continue
print(f'Showing {len(rows)} of {total_count} entries')
print()
" "$db_file" "$n"
}
# ==================== GeoIP ====================