Testing
Tests catch bugs before they reach production. pytest is the standard Python test framework — powerful, simple, and well-documented.
pytest Basics
Installation
uv add --dev pytest pytest-cov pytest-asyncio
First Test
# test_validation.py
from netapi.utils.validation import validate_ip, validate_mac
# NOTE: validate_ip / validate_mac are assumed to return plain booleans.
# Assert truthiness directly instead of comparing with == True / == False
# (flagged by flake8/ruff as E712).
def test_valid_ip():
    """A well-formed dotted-quad address is accepted."""
    assert validate_ip("10.50.1.20")

def test_invalid_ip():
    """Garbage, out-of-range octets, and short addresses are rejected."""
    assert not validate_ip("invalid")
    assert not validate_ip("256.1.1.1")  # octet > 255
    assert not validate_ip("10.50.1")    # too few octets

def test_valid_mac():
    """Colon-separated MACs are accepted, including hex letters."""
    assert validate_mac("00:11:22:33:44:55")
    assert validate_mac("00:11:22:33:44:FF")

def test_invalid_mac():
    """Garbage and truncated MACs are rejected."""
    assert not validate_mac("invalid")
    assert not validate_mac("00:11:22:33:44")  # only 5 octets
Running Tests
# Run all tests
uv run pytest
# Run specific file
uv run pytest tests/test_validation.py
# Run specific test
uv run pytest tests/test_validation.py::test_valid_ip
# Verbose output
uv run pytest -v
# Stop on first failure
uv run pytest -x
# Show print statements
uv run pytest -s
# Run tests matching pattern
uv run pytest -k "ip" # Only tests with "ip" in name
Test Organization
project/
├── src/
│ └── netapi/
│ ├── __init__.py
│ ├── utils/
│ │ └── validation.py
│ └── vendors/
│ └── cisco/
│ └── ise.py
└── tests/
├── __init__.py
├── conftest.py # Shared fixtures
├── test_validation.py
└── vendors/
└── cisco/
└── test_ise.py
Assertions
Basic Assertions
def test_assertions():
    """Catalogue of common assertion forms.

    Illustrative pseudo-code: result, expected, collection, etc. are
    placeholders, not names defined in this file.
    """
    # Equality
    assert result == expected
    assert result != unexpected
    # Truthiness
    assert is_valid
    assert not is_invalid
    # Membership
    assert item in collection
    assert item not in collection
    # Type
    assert isinstance(obj, dict)
    # Comparison
    assert value > 0
    assert value >= minimum
    assert start <= value <= end
Assertion Messages
def test_with_message():
    """Attach a context message so a failure explains itself in the report.

    `process` and `data` are placeholders for the code under test.
    """
    result = process(data)
    assert result is not None, f"Expected result for data: {data}"
    assert result["status"] == "ok", f"Unexpected status: {result['status']}"
Expecting Exceptions
import pytest
# NOTE(review): these examples assume a validate_ip that raises ValueError on
# bad input, whereas the earlier examples show it returning False — confirm
# which contract the real function actually has.
def test_raises_value_error():
    """The context manager passes only if ValueError is raised inside it."""
    with pytest.raises(ValueError):
        validate_ip("invalid")

def test_raises_with_message():
    """Inspect the raised exception via the captured ExceptionInfo."""
    with pytest.raises(ValueError) as exc_info:
        validate_ip("invalid")
    assert "Invalid IP" in str(exc_info.value)

def test_raises_specific():
    """`match` is a regex searched against the exception message."""
    with pytest.raises(ValueError, match="Invalid IP format"):
        validate_ip("invalid")
Approximate Comparisons
import pytest
def test_floating_point():
    """Use pytest.approx instead of exact == for float results."""
    result = calculate_percentage(3, 7)  # placeholder helper
    assert result == pytest.approx(42.857, rel=0.01)  # 1% tolerance
    assert result == pytest.approx(42.857, abs=0.01)  # Absolute tolerance

def test_list_approx():
    """approx also compares sequences element-wise."""
    results = [1.001, 2.002, 3.003]
    expected = [1.0, 2.0, 3.0]
    assert results == pytest.approx(expected, rel=0.01)
Fixtures
Fixtures provide test data and setup/teardown.
Basic Fixtures
import pytest
@pytest.fixture
def sample_endpoint():
    """Return sample endpoint data."""
    return {
        "mac": "00:11:22:33:44:55",
        "ip": "10.50.10.100",
        "group": "Employees",
        "status": "active"
    }

def test_endpoint_mac(sample_endpoint):
    """The fixture is injected by naming it as a parameter."""
    assert sample_endpoint["mac"] == "00:11:22:33:44:55"

def test_endpoint_status(sample_endpoint):
    """Each test gets a fresh value (function scope is the default)."""
    assert sample_endpoint["status"] == "active"
Fixtures with Setup/Teardown
import pytest
@pytest.fixture
def temp_config(tmp_path):
    """Create temporary config file."""
    config_path = tmp_path / "config.yaml"
    config_path.write_text("hostname: ise-01\nport: 443")
    yield config_path  # Provide to test
    # Cleanup after test (optional - tmp_path auto-cleans)
    config_path.unlink(missing_ok=True)

def test_load_config(temp_config):
    """Round-trip: the YAML written by the fixture parses back correctly."""
    import yaml
    data = yaml.safe_load(temp_config.read_text())
    assert data["hostname"] == "ise-01"
Fixture Scopes
import pytest
# create_connection / create_client / APIClient / start_container below are
# placeholders for real project code.

# Run once per test function (default)
@pytest.fixture(scope="function")
def db_connection():
    """Fresh connection per test; closed after the test, even on failure."""
    conn = create_connection()
    yield conn
    conn.close()

# Run once per test class
@pytest.fixture(scope="class")
def shared_client():
    """One client shared by every test in a class (no teardown needed)."""
    return create_client()

# Run once per module
@pytest.fixture(scope="module")
def api_client():
    """Connect once per test module; disconnect after the last test in it."""
    client = APIClient()
    client.connect()
    yield client
    client.disconnect()

# Run once per session
@pytest.fixture(scope="session")
def docker_container():
    """Expensive setup shared by the whole test run."""
    container = start_container()
    yield container
    container.stop()
conftest.py
Fixtures defined in conftest.py are shared automatically across all test files in that directory and below:
# tests/conftest.py
import pytest
@pytest.fixture
def ise_config():
    """Connection settings reused by tests in every file under tests/."""
    return {
        "host": "ise-01.inside.domusdigitalis.dev",
        "username": "admin",
        "password": "test-password",
        "verify_ssl": False
    }

@pytest.fixture
def sample_endpoints():
    """Two endpoints in different groups, for filtering/grouping tests."""
    return [
        {"mac": "00:11:22:33:44:55", "group": "Employees"},
        {"mac": "00:11:22:33:44:56", "group": "Guests"},
    ]
Parametrized Fixtures
import pytest
@pytest.fixture(params=["json", "yaml", "toml"])
def config_format(request):
    """Parametrized fixture: every dependent test runs once per param."""
    return request.param

def test_config_loading(config_format):
    """Runs 3 times, once per format (load_config is a placeholder)."""
    # Test runs 3 times with different formats
    config = load_config(f"config.{config_format}")
    assert config is not None
Parametrized Tests
Basic Parametrization
import pytest
@pytest.mark.parametrize("ip,expected", [
    ("10.50.1.20", True),
    ("192.168.1.1", True),
    ("255.255.255.255", True),
    ("0.0.0.0", True),
    ("invalid", False),
    ("256.1.1.1", False),
    ("10.50.1", False),
    ("10.50.1.1.1", False),
])
def test_validate_ip(ip, expected):
    """One test function, eight generated test cases."""
    assert validate_ip(ip) == expected
Multiple Parameters
@pytest.mark.parametrize("mac,normalized", [
    ("00:11:22:33:44:55", "00:11:22:33:44:55"),
    ("00:11:22:33:44:ff", "00:11:22:33:44:FF"),
    ("00-11-22-33-44-55", "00:11:22:33:44:55"),
])
def test_normalize_mac(mac, normalized):
    """Normalization uppercases hex digits and converts dashes to colons."""
    assert normalize_mac(mac) == normalized

# Combine multiple parametrize decorators (cartesian product)
@pytest.mark.parametrize("host", ["ise-01", "ise-02"])
@pytest.mark.parametrize("port", [443, 9060])
def test_connect(host, port):
    """Stacked decorators multiply: 2 hosts x 2 ports = 4 test cases."""
    # Runs 4 times: (ise-01, 443), (ise-01, 9060), (ise-02, 443), (ise-02, 9060)
    assert connect(host, port)
IDs for Clarity
# Explicit per-case ids make failures easy to spot in the report, e.g.
# test_validate_ip[invalid-octet]. Do NOT also pass ids= here: an explicit
# pytest.param(id=...) takes precedence, so a simultaneous ids=str would be
# dead and misleading.
@pytest.mark.parametrize("ip,expected", [
    pytest.param("10.50.1.20", True, id="valid-private"),
    pytest.param("8.8.8.8", True, id="valid-public"),
    pytest.param("invalid", False, id="invalid-string"),
    pytest.param("256.1.1.1", False, id="invalid-octet"),
])
def test_validate_ip(ip, expected):
    """IP validation across named example cases."""
    assert validate_ip(ip) == expected
Mocking
unittest.mock
from unittest.mock import Mock, patch, MagicMock
def test_with_mock():
    """Replace the real client with a Mock and verify the interaction.

    EndpointProcessor is a placeholder for the class under test.
    """
    # Create mock object
    mock_client = Mock()
    mock_client.get_endpoints.return_value = [{"mac": "00:11:22:33:44:55"}]
    # Use in test
    processor = EndpointProcessor(client=mock_client)
    result = processor.process()
    # Assert mock was called correctly
    mock_client.get_endpoints.assert_called_once()
    assert len(result) == 1
Patching
from unittest.mock import patch
# Patch module-level function
@patch("netapi.vendors.cisco.ise.httpx.Client")
def test_ise_client(mock_httpx):
    """Patch where the name is looked up (the ise module), not where defined."""
    mock_httpx.return_value.get.return_value.json.return_value = {"status": "ok"}
    client = ISEClient("ise-01", "admin", "password")
    result = client.get_status()
    assert result["status"] == "ok"

# Patch as context manager
def test_with_context():
    """Patch sleep so retries are instant, then count the attempts.

    NOTE(review): assumes retry_operation sleeps 3 times — confirm.
    """
    with patch("netapi.utils.time.sleep") as mock_sleep:
        retry_operation()
        assert mock_sleep.call_count == 3

# Patch object attribute
def test_patch_attribute():
    """patch.object swaps a class attribute for the duration of the block."""
    with patch.object(ISEClient, "BASE_URL", "https://test:9060"):
        client = ISEClient("test", "admin", "pass")
        assert "test:9060" in client.url
pytest-mock
# pytest-mock provides cleaner syntax
def test_with_mocker(mocker):
    """The mocker fixture auto-undoes all patches at test teardown."""
    mock_client = mocker.Mock()
    mock_client.get_endpoints.return_value = []
    mocker.patch("netapi.processor.get_client", return_value=mock_client)
    result = process_endpoints()
    assert result == []
Mocking HTTP Requests
import pytest
import httpx
from unittest.mock import Mock, patch
@pytest.fixture
def mock_response():
    """A canned httpx.Response double with an empty ISE search result."""
    response = Mock(spec=httpx.Response)
    response.status_code = 200
    response.json.return_value = {"SearchResult": {"resources": []}}
    response.is_success = True
    return response

def test_get_endpoints(mock_response):
    """Stub out httpx.Client, including its context-manager protocol.

    Assumes ISEClient uses `with httpx.Client() as c:` internally — that is
    why __enter__/__exit__ must be wired up by hand below.
    """
    with patch("httpx.Client") as mock_client_class:
        mock_client = Mock()
        mock_client.get.return_value = mock_response
        mock_client_class.return_value.__enter__ = Mock(return_value=mock_client)
        mock_client_class.return_value.__exit__ = Mock(return_value=False)
        client = ISEClient("ise-01", "admin", "pass")
        endpoints = list(client.get_endpoints())
        assert endpoints == []
        mock_client.get.assert_called()
Async Testing
pytest-asyncio
import pytest
# Mark test as async
@pytest.mark.asyncio
async def test_async_fetch():
    """pytest-asyncio runs the coroutine on an event loop."""
    result = await fetch_data("ise-01")
    assert result["status"] == "ok"

# Async fixture
@pytest.fixture
async def async_client():
    # NOTE(review): with pytest-asyncio in strict mode an async fixture needs
    # @pytest_asyncio.fixture; plain @pytest.fixture only works in auto mode
    # (asyncio_mode = "auto") — confirm which mode the project uses.
    client = AsyncISEClient("ise-01", "admin", "pass")
    yield client
    await client.close()

@pytest.mark.asyncio
async def test_with_async_fixture(async_client):
    """Async fixtures are awaited and finalized by the plugin automatically."""
    endpoints = await async_client.get_endpoints()
    assert isinstance(endpoints, list)
Mocking Async
import pytest
from unittest.mock import AsyncMock, Mock, patch

@pytest.mark.asyncio
async def test_async_client():
    """Mock an async HTTP call while keeping the response API accurate."""
    # httpx Response.json() is synchronous even on AsyncClient, so the
    # response double is a plain Mock — mocking it as an AsyncMock would
    # teach callers to `await response.json()`, which fails on real httpx.
    mock_response = Mock()
    mock_response.json.return_value = {"status": "ok"}
    # patch() detects that AsyncClient.get is a coroutine function and
    # substitutes an AsyncMock, so `await client.get(...)` still works.
    with patch("httpx.AsyncClient.get", return_value=mock_response):
        async with httpx.AsyncClient() as client:
            response = await client.get("/endpoint")
            data = response.json()  # no await: json() is not a coroutine
    assert data["status"] == "ok"
Coverage
Running with Coverage
# Basic coverage
uv run pytest --cov=netapi
# With report
uv run pytest --cov=netapi --cov-report=term-missing
# HTML report
uv run pytest --cov=netapi --cov-report=html
# Open htmlcov/index.html
# Fail if coverage below threshold
uv run pytest --cov=netapi --cov-fail-under=80
Coverage Configuration
# pyproject.toml
[tool.pytest.ini_options]
testpaths = ["tests"]
addopts = "-v --cov=netapi --cov-report=term-missing"
asyncio_mode = "auto"
[tool.coverage.run]
branch = true
source = ["netapi"]
omit = ["*/tests/*", "*/__pycache__/*"]
[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"if TYPE_CHECKING:",
"raise NotImplementedError",
]
Test Markers
Built-in Markers
import sys

import pytest

# Skip unconditionally
@pytest.mark.skip(reason="Not implemented yet")
def test_future_feature():
    pass

# Skip conditionally — note `sys` must be imported for the platform check
@pytest.mark.skipif(sys.platform == "win32", reason="Unix only")
def test_unix_feature():
    pass

# Expected failure: recorded as xfail instead of a failure while bug #123
# is open (broken_function/expected are placeholders)
@pytest.mark.xfail(reason="Known bug #123")
def test_known_bug():
    assert broken_function() == expected

# Custom marker; select or deselect with `-m slow` / `-m "not slow"`
@pytest.mark.slow
def test_integration():
    pass
Custom Markers
# conftest.py
def pytest_configure(config):
    """Register custom markers so pytest (and --strict-markers) accepts them."""
    config.addinivalue_line("markers", "integration: integration tests")
    config.addinivalue_line("markers", "slow: slow tests")

# test file
@pytest.mark.integration
def test_real_api():
    pass

# Run only integration tests
# uv run pytest -m integration
# Exclude slow tests
# uv run pytest -m "not slow"
Testing CLI
from click.testing import CliRunner
from netapi.cli.main import cli
def test_cli_version():
    """--version exits 0 and prints the program name."""
    runner = CliRunner()
    result = runner.invoke(cli, ["--version"])
    assert result.exit_code == 0
    assert "netapi" in result.output

def test_cli_endpoints():
    """Happy path: subcommand invoked with its required --host option."""
    runner = CliRunner()
    result = runner.invoke(cli, ["ise", "endpoints", "--host", "ise-01"])
    assert result.exit_code == 0

def test_cli_missing_option():
    """Click reports a missing required option with a non-zero exit code."""
    runner = CliRunner()
    result = runner.invoke(cli, ["ise", "endpoints"])
    assert result.exit_code != 0
    assert "Missing option" in result.output

def test_cli_with_input():
    """Simulate interactive stdin — presumably username then password prompts."""
    runner = CliRunner()
    result = runner.invoke(cli, ["login"], input="admin\npassword\n")
    assert result.exit_code == 0
Next Module
Infrastructure Patterns - Real-world patterns from netapi.