A tool to automate the migration of test suites to be compatible with pytest 8.3.4.
- Updates fixture docstrings to explicit format
- Removes empty usefixtures markers
- Updates exception handling to use except* syntax
- Adds tracemalloc support for thread-using fixtures
- Replaces tmpdir with tmp_path
- Updates parameterized tests
- Adds tracemalloc hooks to conftest.py files
- Adds proper cache cleanup in finally blocks
- Updates cache assertions with better error messages
- Ensures cache fixtures are properly torn down
- Handles cache hit/miss assertions
- Adds vector index cleanup and resource management
- Validates batch size operations
- Improves vector search assertions
- Adds garbage collection for resource cleanup
- Handles initialization and document operations
- Adds timeout limits to long-running tests
- Sets resource limits for memory usage
- Implements proper benchmarking context
- Tracks test duration and performance metrics
- Improves timing assertion messages
- Adds schema validation error handling
- Implements proper error type checking
- Updates schema assertions with detailed messages
- Handles various schema validation scenarios
- Improves JSON and UTF-8 validation
- Updates request mocking patterns
- Adds proper response fixtures
- Improves authentication handling
- Enhances API client setup
- Updates HTTP assertions with better messages
- Adds service dependency management
- Implements retry logic for flaky tests
- Adds proper cleanup for resources
- Manages container lifecycle
- Updates integration markers
- Updates authentication test patterns
- Improves permission check handling
- Adds security fixtures
- Implements sensitive data cleanup
- Enhances security assertions
- Updates fixture scopes based on usage
- Adds explicit fixture dependencies
- Implements proper teardown using yield
- Manages autouse fixtures
- Improves resource cleanup
- Adds caplog fixture integration
- Updates log level management
- Enhances log message assertions
- Implements proper log cleanup
- Improves log capture handling
- Adds thread pool management
- Implements synchronization primitives
- Updates thread cleanup
- Enhances thread safety assertions
- Manages thread resources
# Using pip
pip install pytest-834-migrator
# Using poetry
poetry add pytest-834-migrator
# Run with dry-run to see what would change
pytest-migrate /path/to/tests --dry-run
# Run the actual migration
pytest-migrate /path/to/tests
After running the migration:
- Check the log file (`pytest_migration.log`) for details about the changes made
- Run your test suite with pytest 8.3.4:
pytest --version  # Should show 8.3.4
pytest            # Run tests
- Review the changes in your version control system:
git diff # If using git
The migrator automatically detects and updates several categories of tests:
- Tests with the `@pytest.mark.cache` decorator
- Functions starting with `test_cache_`
- Tests using cache fixtures
- Tests with the `@pytest.mark.vector` decorator
- Functions starting with `test_vector_`
- Tests for initialization, batch operations, etc.
- Tests with `@pytest.mark.performance` or `@pytest.mark.benchmark`
- Functions containing "performance" or "benchmark"
- Tests measuring timing or resource usage
- Tests with the `@pytest.mark.schema` decorator
- Functions testing JSON, UTF-8, and field validation
- Schema compatibility and error handling tests
- Tests with the `@pytest.mark.http` decorator
- Functions testing API endpoints
- Tests using request/response mocking
- Tests with the `@pytest.mark.integration` decorator
- Functions testing service interactions
- Tests requiring external dependencies
- Tests with the `@pytest.mark.security` decorator
- Functions testing authentication/authorization
- Tests handling sensitive data
- Tests with complex fixture dependencies
- Functions using multiple fixtures
- Tests requiring specific scopes
- Tests with the `@pytest.mark.logging` decorator
- Functions testing log messages
- Tests using the `caplog` fixture
- Tests with the `@pytest.mark.thread_safe` decorator
- Functions testing concurrent operations
- Tests requiring synchronization
- Python 3.9 or later (for native ast.unparse support)
- pytest 8.3.4
# Before
def test_cache_hit():
assert cache.get("key") == "value"
# After
def test_cache_hit():
try:
yield
finally:
cache.clear()
assert cache.get("key") == "value", "Cache assertion failed - see logs for details"
# Before
def test_vector_batch():
vector_index.add_batch(docs)
assert vector_index.size() == len(docs)
# After
@pytest.mark.vector
def test_vector_batch():
try:
yield
finally:
vector_index.clear()
vector_index.close()
gc.collect()
assert batch_size > 0, "Batch size must be positive"
assert batch_size <= max_batch_size, "Batch size exceeds maximum allowed"
vector_index.add_batch(docs)
assert vector_index.size() == len(docs), "Vector index operation failed - check logs for details"
# Before
def test_index_performance():
start = time.time()
index.build()
duration = time.time() - start
assert duration < 5.0
# After
@pytest.mark.timeout(300)
@pytest.mark.resource_limit(memory_mb=1024)
def test_index_performance():
start_time = time.perf_counter()
try:
yield
finally:
end_time = time.perf_counter()
duration = end_time - start_time
if "benchmark" in request.keywords:
benchmark.extra_info["duration"] = duration
assert duration < 5.0, "Performance threshold exceeded - see benchmark report"
# Before
def test_schema_validation():
validate_schema(data)
# After
@pytest.mark.schema
@pytest.mark.schema_errors
def test_schema_validation():
try:
with pytest.raises(SchemaValidationError) as exc_info:
yield
assert exc_info.type in EXPECTED_SCHEMA_ERRORS, f"Unexpected error type: {exc_info.type}"
except Exception as e:
pytest.fail(f"Schema validation failed with unexpected error: {e}")
validate_schema(data)
# Before
def test_api_endpoint():
response = client.get("/api/data")
assert response.status_code == 200
# After
@pytest.mark.http
def test_api_endpoint(api_client, responses):
responses.add(
responses.GET,
"/api/data",
json={"data": "test"},
status=200
)
response = api_client.get("/api/data")
assert response.status_code == 200, "HTTP response status code did not match expected value"
assert response.json() == {"data": "test"}, "Response content did not match expected value"
# Before
def test_service_integration():
service.start()
result = service.process()
assert result.success
# After
@pytest.mark.integration
@pytest.mark.flaky(max_runs=3, min_passes=1)
def test_service_integration(service_client):
try:
service_client.start()
result = service_client.process()
assert result.success, "Service integration failed - check service logs"
finally:
service_client.cleanup()
# Before
def test_user_auth():
user = authenticate(credentials)
assert user.is_authenticated
# After
@pytest.mark.security
def test_user_auth(auth_client, test_user, token_factory):
try:
token = token_factory.create_token(test_user)
user = auth_client.authenticate(token)
assert user.is_authenticated, "Authentication failed - verify credentials and token validity"
finally:
test_user.cleanup()
clear_sensitive_data()
# Before
def test_concurrent_access():
with ThreadPoolExecutor() as executor:
futures = [executor.submit(access_resource) for _ in range(10)]
results = [f.result() for f in futures]
assert all(results)
# After
@pytest.mark.thread_safe
def test_concurrent_access(thread_pool, thread_lock):
try:
with ThreadPoolExecutor(max_workers=4) as executor:
with thread_lock:
futures = [executor.submit(access_resource) for _ in range(10)]
results = [f.result() for f in futures]
assert all(results), "Thread safety violation detected - check synchronization"
finally:
executor.shutdown(wait=True)
cleanup_threads()