Comprehensive guide to hyper2kvm testing infrastructure and practices
hyper2kvm maintains 100% test coverage with 664 tests covering unit, integration, and end-to-end scenarios. This guide explains our testing strategy, how to run tests, and how to write new tests.
tests/
├── unit/ # Fast, isolated unit tests
│   ├── test_cli/ # CLI argument parsing
│   ├── test_converters/ # Format converters (VMDK, VHD)
│   ├── test_fixers/ # Offline VM fixes
│   ├── test_hooks/ # Hook execution
│   ├── test_libvirt/ # Libvirt integration
│   ├── test_manifest/ # Manifest handling
│   ├── test_profiles/ # Profile system
│   ├── test_utils/ # Utility functions
│   └── test_vmware/ # VMware-specific logic
│
├── integration/ # Integration tests
│   ├── test_batch_features/ # Batch migration
│   ├── test_hooks/ # Hook integration
│   ├── test_offline_operations/ # Offline VM operations
│   └── test_vsphere/ # vSphere integration
│
├── fixtures/ # Test fixtures and helpers
│   ├── fake_guestfs.py # Mock libguestfs interface
│   └── fake_logger.py # Mock logging
│
├── fakes/ # Fake test data
│   ├── images/ # Test VM images
│   └── create_test_images.py # Image generators
│
└── pytest.ini # pytest configuration
# Install test dependencies
pip install pytest pytest-cov pytest-xdist pytest-timeout
# Optional: parallel execution
pip install pytest-xdist
# Run all tests
pytest
# Run only unit tests
pytest tests/unit/
# Run only integration tests
pytest tests/integration/
# Run with coverage report
pytest --cov=hyper2kvm --cov-report=html
# Run in parallel (faster)
pytest -n auto
# Run specific test file
pytest tests/unit/test_converters/test_vmdk_parser.py
# Run specific test class
pytest tests/unit/test_converters/test_vmdk_parser.py::TestVMDKParser
# Run specific test
pytest tests/unit/test_converters/test_vmdk_parser.py::TestVMDKParser::test_parse_descriptor
# Verbose output
pytest -v
# Show print statements
pytest -s
# Stop on first failure
pytest -x
# Run only failed tests from last run
pytest --lf
# Run with specific markers
pytest -m "not slow" # Skip slow tests
pytest -m "integration" # Only integration tests
# Run with timeout (prevents hanging)
pytest --timeout=300
# Generate XML report (for CI)
pytest --junitxml=test-results.xml
# Generate coverage badge
pytest --cov=hyper2kvm --cov-report=term-missing
# Parallel execution with live output
pytest -n auto -v --tb=short
Fast, isolated tests for individual components
# tests/unit/test_converters/test_vmdk_parser.py
def test_parse_vmdk_descriptor():
"""Test VMDK descriptor parsing"""
parser = VMDKParser()
descriptor = parser.parse_descriptor(vmdk_path)
assert descriptor["ddb.adapterType"] == "lsilogic"
assert descriptor["createType"] == "monolithicSparse"
# tests/unit/test_fixers/test_network_config_injector.py
def test_inject_network_config():
"""Test network configuration injection"""
g = FakeGuestFS()
obj = create_test_object(network_config=config)
result = inject_network_config(obj, g)
assert result["injected"] is True
assert len(result["files_created"]) == 2
# tests/unit/test_hooks/test_hook_runner.py
def test_hook_execution():
"""Test hook execution with retry"""
hook = {
"type": "script",
"path": "/path/to/hook.sh",
"retry": {"max_attempts": 3}
}
runner = HookRunner()
result = runner.execute_hook(hook, context)
assert result["success"] is True
Tests combining multiple components
# tests/integration/test_batch_features/test_batch_orchestrator.py
def test_parallel_batch_migration():
"""Test parallel VM migration"""
batch_manifest = create_batch_manifest(vm_count=5)
orchestrator = BatchOrchestrator()
results = orchestrator.run_batch(batch_manifest, parallel_limit=2)
assert len(results) == 5
assert all(r["status"] == "completed" for r in results)
# tests/integration/test_hooks/test_hook_integration.py
def test_pre_post_hooks():
"""Test pre and post hooks in pipeline"""
manifest = {
"hooks": {
"pre_fix": [{"type": "script", "path": "pre.sh"}],
"post_fix": [{"type": "script", "path": "post.sh"}]
}
}
pipeline = Pipeline(manifest)
result = pipeline.run()
assert result["hooks_executed"] == 2
# tests/unit/test_module/test_feature.py
"""Unit tests for feature X."""
import pytest
from hyper2kvm.module import Feature
class TestFeature:
"""Test suite for Feature functionality."""
def test_basic_operation(self):
"""Test basic feature operation."""
feature = Feature()
result = feature.do_something()
assert result is not None
assert result.status == "success"
def test_error_handling(self):
"""Test error handling."""
feature = Feature()
with pytest.raises(ValueError, match="Invalid input"):
feature.do_something(invalid_input=True)
def test_edge_case(self):
"""Test edge case handling."""
feature = Feature()
result = feature.do_something(edge_case=True)
assert result.handled_correctly is True
# tests/unit/conftest.py
import pytest
from tests.fixtures.fake_guestfs import FakeGuestFS
@pytest.fixture
def fake_guest():
"""Provide FakeGuestFS instance."""
g = FakeGuestFS()
g.fs["/etc/hostname"] = b"testhost\n"
return g
# In test file
def test_with_fixture(fake_guest):
"""Test using fake_guest fixture."""
hostname = fake_guest.read_file("/etc/hostname")
assert hostname == b"testhost\n"
@pytest.mark.parametrize("input,expected", [
("eth0", "network"),
("br0", "bridge"),
("bond0", "bond"),
])
def test_device_type_detection(input, expected):
"""Test device type detection with various inputs."""
device_type = detect_device_type(input)
assert device_type == expected
def test_with_temp_files(tmp_path):
"""Test using temporary directory."""
# tmp_path is a pytest fixture providing temp directory
test_file = tmp_path / "test.vmdk"
test_file.write_text("# Disk DescriptorFile\n")
result = parse_vmdk(test_file)
assert result is not None
# Cleanup automatic
Mock libguestfs for testing without real VMs:
from tests.fixtures.fake_guestfs import FakeGuestFS
def test_file_operations():
"""Test file operations on fake guest."""
g = FakeGuestFS()
# Write file
g.write("/etc/hostname", b"newhost\n")
# Read file
content = g.read_file("/etc/hostname")
assert content == b"newhost\n"
# Check file exists
assert g.exists("/etc/hostname")
assert g.is_file("/etc/hostname")
# Create directory
g.mkdir_p("/etc/systemd/network")
assert g.is_dir("/etc/systemd/network")
# Create symlink
g.ln_sf("/usr/lib/systemd/system/sshd.service",
"/etc/systemd/system/multi-user.target.wants/sshd.service")
assert g.exists("/etc/systemd/system/multi-user.target.wants/sshd.service")
Mock logger for testing without output:
from tests.fixtures.fake_logger import FakeLogger
def test_with_logging():
"""Test function with logging."""
logger = FakeLogger()
function_that_logs(logger)
# Verify logging calls (if needed)
assert logger.info.called
Create test VMDK images:
from tests.fakes.create_test_images import create_test_vmdk
def test_with_real_vmdk(tmp_path):
"""Test with generated VMDK."""
vmdk_path = tmp_path / "test.vmdk"
create_test_vmdk(
vmdk_path,
size_mb=100,
adapter_type="lsilogic",
create_type="monolithicSparse"
)
result = parse_vmdk(vmdk_path)
assert result["adapter_type"] == "lsilogic"
@pytest.mark.slow
def test_large_conversion():
"""Test that takes >5 seconds."""
pass
@pytest.mark.integration
def test_full_pipeline():
"""Integration test."""
pass
@pytest.mark.skipif(not VSPHERE_AVAILABLE, reason="vSphere not configured")
def test_vsphere_export():
"""Test requiring vSphere."""
pass
@pytest.mark.parametrize("input,expected", [...])
def test_with_params(input, expected):
"""Parametrized test."""
pass
# Skip slow tests
pytest -m "not slow"
# Run only integration tests
pytest -m integration
# Run only unit tests
pytest tests/unit/
# Run everything except vSphere tests
pytest -m "not vsphere"
# Terminal report
pytest --cov=hyper2kvm --cov-report=term-missing
# HTML report (interactive)
pytest --cov=hyper2kvm --cov-report=html
open htmlcov/index.html
# XML report (for CI)
pytest --cov=hyper2kvm --cov-report=xml
# Combined report
pytest --cov=hyper2kvm --cov-report=term-missing --cov-report=html
# pytest.ini (in pyproject.toml use the [tool.pytest.ini_options] table instead)
[tool:pytest]
addopts =
--cov=hyper2kvm
--cov-report=term-missing
--cov-report=html
--cov-fail-under=95
# View coverage summary
pytest --cov=hyper2kvm --cov-report=term
# Find uncovered lines
pytest --cov=hyper2kvm --cov-report=term-missing | grep "TOTAL"
# .github/workflows/tests.yml
name: Tests
on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.10', '3.11', '3.12']
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install -e .[test]
- name: Run tests
run: |
pytest --cov=hyper2kvm --cov-report=xml
- name: Upload coverage
uses: codecov/codecov-action@v3
with:
file: ./coverage.xml
✅ DO:
❌ DON'T:
# Good test names
def test_parse_vmdk_descriptor_with_multiple_extents():
"""Test VMDK descriptor parsing with multiple extent entries."""
pass
def test_network_injection_creates_systemd_files():
"""Test that network injection creates systemd-networkd files."""
pass
# Bad test names
def test_vmdk(): # Too vague
pass
def test_1(): # Meaningless
pass
# Good: Clear, specific assertions
assert result["status"] == "success"
assert len(result["files"]) == 3
assert "error" not in result
# Bad: Vague assertions
assert result # What are we checking?
assert True # Meaningless
# Good: Each test independent
def test_create_user():
g = FakeGuestFS() # Fresh instance
create_user(g, "testuser")
assert g.exists("/home/testuser")
# Bad: Tests depend on each other
def test_step1():
global g
g = FakeGuestFS()
create_user(g, "testuser")
def test_step2(): # Depends on test_step1
assert g.exists("/home/testuser")
# Good: Test specific errors
with pytest.raises(ValueError, match="Invalid username"):
create_user(g, "invalid@user")
# Bad: Catch-all error testing
with pytest.raises(Exception):
create_user(g, "baduser")
# Drop into debugger on failure
pytest --pdb
# Drop into debugger on first failure, then stop
pytest -x --pdb
# Show local variables on failure
pytest --showlocals
# Full traceback
pytest --tb=long
# Show print statements
pytest -s
def test_with_debugging():
"""Test with breakpoint."""
result = function_under_test()
# Add breakpoint
import pdb; pdb.set_trace()
assert result is not None
import time
def test_performance():
"""Test that operation completes in reasonable time."""
start = time.time()
perform_expensive_operation()
duration = time.time() - start
assert duration < 5.0, f"Too slow: {duration}s"
import pytest
def test_batch_processing_performance(benchmark):
"""Benchmark batch processing."""
batch = create_test_batch(size=100)
result = benchmark(process_batch, batch)
assert result["processed"] == 100
def test_invalid_input_raises_error():
"""Test that invalid input raises ValueError."""
with pytest.raises(ValueError, match="Invalid format"):
parse_config(invalid_data)
import warnings
def test_deprecated_function_warns():
"""Test that deprecated function warns."""
with pytest.warns(DeprecationWarning):
old_function()
from unittest.mock import Mock, patch
def test_with_mock():
"""Test with mocked dependency."""
with patch('hyper2kvm.module.external_call') as mock_call:
mock_call.return_value = "mocked_result"
result = function_that_calls_external()
assert mock_call.called
assert result == "processed_mocked_result"
When updating code:
# Before refactoring
pytest --cov=hyper2kvm --cov-report=term > coverage_before.txt
# After refactoring
pytest --cov=hyper2kvm --cov-report=term > coverage_after.txt
# Compare
diff coverage_before.txt coverage_after.txt
Last Updated: 2026-01-23 Test Coverage: 100% (664 tests passing) Maintained by: hyper2kvm team