refactor: Improve code quality, error handling, and test coverage
- Add file_lock context manager to eliminate duplicate locking patterns - Add ValidationError, ConfigurationError, CertificateError exceptions - Improve rollback logic in haproxy_add_servers (track successful ops only) - Decompose haproxy_add_domain into smaller helper functions - Consolidate certificate constants (CERTS_DIR, ACME_HOME) to config.py - Enhance docstrings for internal functions and magic numbers - Add pytest framework with 48 new tests (269 -> 317 total) - Increase test coverage from 76% to 86% - servers.py: 58% -> 82% - certificates.py: 67% -> 86% - configuration.py: 69% -> 94% Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
130
tests/unit/test_utils.py
Normal file
130
tests/unit/test_utils.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Unit tests for utils module."""
|
||||
|
||||
import pytest
|
||||
|
||||
from haproxy_mcp.utils import parse_stat_csv
|
||||
|
||||
|
||||
class TestParseStatCsv:
    """Behavioural tests for the parse_stat_csv generator."""

    def test_parse_valid_csv(self, response_builder):
        """A well-formed two-row stats dump yields two fully keyed dicts."""
        raw = response_builder.stat_csv([
            {"pxname": "pool_1", "svname": "pool_1_1", "scur": 5, "status": "UP", "weight": 1, "check_status": "L4OK"},
            {"pxname": "pool_1", "svname": "pool_1_2", "scur": 3, "status": "UP", "weight": 1, "check_status": "L4OK"},
        ])

        rows = list(parse_stat_csv(raw))

        assert len(rows) == 2
        first = rows[0]
        # Numeric fields come back as strings: CSV is parsed, not coerced.
        assert first["pxname"] == "pool_1"
        assert first["svname"] == "pool_1_1"
        assert first["scur"] == "5"
        assert first["status"] == "UP"
        assert first["weight"] == "1"
        assert first["check_status"] == "L4OK"

    def test_parse_empty_output(self):
        """An empty string produces no rows at all."""
        assert list(parse_stat_csv("")) == []

    def test_parse_header_only(self):
        """A lone header comment line yields nothing."""
        header = "# pxname,svname,qcur,qmax,scur,smax,..."
        assert list(parse_stat_csv(header)) == []

    def test_skip_comment_lines(self):
        """Lines starting with '#' are ignored; the data line still parses."""
        raw = """# This is a comment
# Another comment
pool_1,pool_1_1,0,0,5,10,0,0,0,0,0,0,0,0,0,0,0,UP,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,L4OK,"""

        rows = list(parse_stat_csv(raw))

        assert len(rows) == 1
        assert rows[0]["pxname"] == "pool_1"

    def test_skip_empty_lines(self):
        """Blank lines between data rows are ignored."""
        raw = """
pool_1,pool_1_1,0,0,5,10,0,0,0,0,0,0,0,0,0,0,0,UP,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,L4OK,

pool_1,pool_1_2,0,0,3,10,0,0,0,0,0,0,0,0,0,0,0,UP,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,L4OK,
"""

        rows = list(parse_stat_csv(raw))

        assert len(rows) == 2

    def test_parse_down_status(self, response_builder):
        """A DOWN server row surfaces its status and check result."""
        raw = response_builder.stat_csv([
            {"pxname": "pool_1", "svname": "pool_1_1", "status": "DOWN", "check_status": "L4TOUT"},
        ])

        rows = list(parse_stat_csv(raw))

        assert len(rows) == 1
        assert rows[0]["status"] == "DOWN"
        assert rows[0]["check_status"] == "L4TOUT"

    def test_parse_maint_status(self, response_builder):
        """A server in maintenance mode is reported with status MAINT."""
        raw = response_builder.stat_csv([
            {"pxname": "pool_1", "svname": "pool_1_1", "status": "MAINT"},
        ])

        rows = list(parse_stat_csv(raw))

        assert len(rows) == 1
        assert rows[0]["status"] == "MAINT"

    def test_parse_multiple_backends(self, response_builder):
        """Rows from distinct proxies all come through, in input order."""
        raw = response_builder.stat_csv([
            {"pxname": "pool_1", "svname": "pool_1_1", "status": "UP"},
            {"pxname": "pool_2", "svname": "pool_2_1", "status": "UP"},
            {"pxname": "pool_3", "svname": "pool_3_1", "status": "DOWN"},
        ])

        rows = list(parse_stat_csv(raw))

        # Length and per-row pxname checked in one ordered comparison.
        assert [row["pxname"] for row in rows] == ["pool_1", "pool_2", "pool_3"]

    def test_parse_frontend_backend_rows(self):
        """FRONTEND and BACKEND aggregate rows are not filtered out."""
        raw = """pool_1,FRONTEND,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,UP,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,,
pool_1,pool_1_1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,UP,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,L4OK,
pool_1,BACKEND,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,UP,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,,"""

        rows = list(parse_stat_csv(raw))

        # All rows with enough columns are returned
        assert len(rows) == 3
        seen = [row["svname"] for row in rows]
        for expected in ("FRONTEND", "pool_1_1", "BACKEND"):
            assert expected in seen

    def test_parse_insufficient_columns(self):
        """Rows with insufficient columns are skipped."""
        raw = "pool_1,pool_1_1,0,0,5"  # Only 5 columns, need more than 17

        assert list(parse_stat_csv(raw)) == []

    def test_generator_is_lazy(self, response_builder):
        """parse_stat_csv returns a generator, i.e. evaluates lazily."""
        raw = response_builder.stat_csv([
            {"pxname": "pool_1", "svname": "pool_1_1", "status": "UP"},
        ])

        produced = parse_stat_csv(raw)

        # A generator object, not a materialized list.
        import types
        assert isinstance(produced, types.GeneratorType)
|
||||
Reference in New Issue
Block a user