@re-shell/cli
Version:
Full-stack development platform uniting microservices and microfrontends. Build complete applications with .NET (ASP.NET Core Web API, Minimal API), Java (Spring Boot, Quarkus, Micronaut, Vert.x), Rust (Actix-Web, Warp, Rocket, Axum), Python (FastAPI, Django, Flask), and more.
1,630 lines (1,337 loc) • 122 kB
JavaScript
"use strict";
/**
* Comprehensive Pytest Configuration for Python Framework Templates
* Provides fixtures, coverage, async testing, and framework-specific test utilities
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.pytestConfigGenerator = exports.PytestConfigGenerator = void 0;
class PytestConfigGenerator {
generatePytestConfig(framework) {
return {
framework,
pythonVersion: '3.11',
enableAsync: true,
enableCoverage: true,
enableFixtures: true,
enableMocking: true,
enableBenchmarks: true,
enableParallel: true
};
}
/**
 * Generate the contents of the shared conftest.py emitted into every Python
 * framework template. Thin delegating wrapper kept as the public entry point;
 * the actual Python source lives in generateBaseConftest().
 *
 * @returns {string} Python source text for conftest.py.
 */
generateConftestPy() {
    return this.generateBaseConftest();
}
generateTestUtilities() {
return `"""
Comprehensive test utilities for Python framework testing
Provides common testing functions and assertion helpers
"""
import asyncio
import json
import os
import tempfile
import time
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from unittest.mock import AsyncMock, Mock, patch
import pytest
# Test Data Generation Utilities
def generate_fake_data(data_type: str = "user") -> Dict[str, Any]:
"""Generate fake test data."""
fake_data = {
"user": {
"id": "550e8400-e29b-41d4-a716-446655440000",
"username": "testuser",
"email": "test@example.com",
"first_name": "Test",
"last_name": "User",
"is_active": True,
},
"post": {
"id": "550e8400-e29b-41d4-a716-446655440001",
"title": "Test Post",
"content": "Test content",
"author_id": "550e8400-e29b-41d4-a716-446655440000",
},
"product": {
"id": "550e8400-e29b-41d4-a716-446655440002",
"name": "Test Product",
"price": 99.99,
"description": "Test product description",
}
}
return fake_data.get(data_type, {})
def random_test_data(length: int = 10) -> List[Dict[str, Any]]:
"""Generate random test data."""
import random
import string
data = []
for i in range(length):
data.append({
"id": ''.join(random.choices(string.ascii_lowercase + string.digits, k=8)),
"name": f"Test Item {i}",
"value": random.randint(1, 100),
"active": random.choice([True, False]),
})
return data
# Factory Pattern Utilities
class TestFactory:
"""Factory for creating test objects."""
@staticmethod
def create_user(**kwargs):
"""Create test user."""
defaults = {
"username": "testuser",
"email": "test@example.com",
"is_active": True,
}
defaults.update(kwargs)
return defaults
@staticmethod
def create_admin_user(**kwargs):
"""Create test admin user."""
defaults = {
"username": "admin",
"email": "admin@example.com",
"is_active": True,
"is_admin": True,
"roles": ["admin", "user"],
}
defaults.update(kwargs)
return defaults
@staticmethod
def create_post(**kwargs):
"""Create test post."""
defaults = {
"title": "Test Post",
"content": "Test content",
"status": "published",
}
defaults.update(kwargs)
return defaults
def test_factory(object_type: str, **kwargs):
"""Factory function for test objects."""
factory_map = {
"user": TestFactory.create_user,
"admin": TestFactory.create_admin_user,
"post": TestFactory.create_post,
}
factory_func = factory_map.get(object_type)
if factory_func:
return factory_func(**kwargs)
else:
raise ValueError(f"Unknown object type: {object_type}")
# Database Testing Utilities
async def cleanup_database(db_connection):
"""Clean up database after tests."""
if hasattr(db_connection, 'execute'):
await db_connection.execute("DELETE FROM users WHERE email LIKE '%test%'")
await db_connection.execute("DELETE FROM posts WHERE title LIKE '%test%'")
await db_connection.commit()
async def seed_test_data(db_connection, data: List[Dict[str, Any]]):
"""Seed database with test data."""
for item in data:
# This would be framework-specific implementation
pass
async def reset_database(db_connection):
"""Reset database to clean state."""
await cleanup_database(db_connection)
# Recreate tables if needed
pass
async def rollback_transaction(db_connection):
"""Rollback database transaction."""
if hasattr(db_connection, 'rollback'):
await db_connection.rollback()
# HTTP Testing Utilities
def assert_status(response, expected_status: int):
"""Assert HTTP response status."""
actual_status = getattr(response, 'status_code', getattr(response, 'status', None))
assert actual_status == expected_status, f"Expected status {expected_status}, got {actual_status}"
def assert_json(response, expected_keys: List[str] = None):
"""Assert JSON response structure."""
if hasattr(response, 'json'):
data = response.json if callable(response.json) else response.json()
elif hasattr(response, 'get_json'):
data = response.get_json()
else:
data = json.loads(response.body if hasattr(response, 'body') else response.content)
assert isinstance(data, dict), "Response is not JSON object"
if expected_keys:
for key in expected_keys:
assert key in data, f"Missing key '{key}' in response"
return data
def assert_headers(response, expected_headers: Dict[str, str]):
"""Assert response headers."""
headers = getattr(response, 'headers', {})
for header, value in expected_headers.items():
assert header in headers, f"Missing header '{header}'"
assert headers[header] == value, f"Header '{header}': expected '{value}', got '{headers[header]}'"
def assert_content(response, expected_content: str):
"""Assert response content contains expected text."""
content = getattr(response, 'content', getattr(response, 'body', ''))
if isinstance(content, bytes):
content = content.decode('utf-8')
assert expected_content in content, f"Content does not contain '{expected_content}'"
# Authentication Testing Utilities
def create_test_user(username: str = "testuser", **kwargs) -> Dict[str, Any]:
"""Create test user data."""
return TestFactory.create_user(username=username, **kwargs)
def generate_test_token(payload: Dict[str, Any] = None) -> str:
"""Generate test JWT token."""
import jwt
default_payload = {
"user_id": "550e8400-e29b-41d4-a716-446655440000",
"username": "testuser",
"exp": int(time.time()) + 3600, # 1 hour
}
if payload:
default_payload.update(payload)
return jwt.encode(default_payload, "test-secret", algorithm="HS256")
async def authenticate_test_user(client, username: str = "testuser", password: str = "testpass"):
"""Authenticate test user and return token."""
login_data = {
"username": username,
"password": password,
}
# This would be framework-specific implementation
response = await client.post("/auth/login", json=login_data)
data = assert_json(response, ["token"])
return data["token"]
async def logout_test_user(client, token: str):
"""Logout test user."""
headers = {"Authorization": f"Bearer {token}"}
response = await client.post("/auth/logout", headers=headers)
assert_status(response, 200)
# File Testing Utilities
def create_test_file(filename: str = "test_file.txt", content: bytes = b"test content") -> Path:
"""Create temporary test file."""
temp_dir = Path(tempfile.mkdtemp())
file_path = temp_dir / filename
file_path.write_bytes(content)
return file_path
async def upload_test_file(client, file_path: Path, endpoint: str = "/upload"):
"""Upload test file."""
with open(file_path, "rb") as f:
files = {"file": (file_path.name, f, "text/plain")}
response = await client.post(endpoint, files=files)
return response
def cleanup_test_files(*file_paths: Path):
"""Clean up test files."""
for file_path in file_paths:
if file_path.exists():
if file_path.is_file():
file_path.unlink()
elif file_path.is_dir():
import shutil
shutil.rmtree(file_path)
# Performance Testing Utilities
def time_execution(func):
"""Decorator to time function execution."""
def wrapper(*args, **kwargs):
start_time = time.perf_counter()
result = func(*args, **kwargs)
end_time = time.perf_counter()
execution_time = end_time - start_time
print(f"Function {func.__name__} took {execution_time:.4f} seconds")
return result
return wrapper
def assert_performance(actual_time: float, max_time: float):
"""Assert performance within limits."""
assert actual_time <= max_time, f"Performance test failed: {actual_time:.4f}s > {max_time:.4f}s"
def memory_usage():
"""Get current memory usage."""
import psutil
process = psutil.Process()
return process.memory_info().rss / 1024 / 1024 # MB
# Schema Validation Utilities
def validate_schema(data: Dict[str, Any], schema: Dict[str, Any]) -> bool:
"""Validate data against schema."""
for field, field_type in schema.items():
if field not in data:
return False
if not isinstance(data[field], field_type):
return False
return True
def validate_response(response, schema: Dict[str, Any]) -> bool:
"""Validate response against schema."""
data = assert_json(response)
return validate_schema(data, schema)
def validate_model(model_data: Dict[str, Any], required_fields: List[str]) -> bool:
"""Validate model data has required fields."""
for field in required_fields:
if field not in model_data:
return False
return True
# Exception Testing Utilities
def assert_raises(exception_type: Exception, func, *args, **kwargs):
"""Assert function raises specific exception."""
try:
func(*args, **kwargs)
pytest.fail(f"Expected {exception_type.__name__} but no exception was raised")
except exception_type:
pass # Expected exception
except Exception as e:
pytest.fail(f"Expected {exception_type.__name__} but got {type(e).__name__}: {e}")
async def test_error_response(client, endpoint: str, expected_status: int):
"""Test error response from endpoint."""
response = await client.get(endpoint)
assert_status(response, expected_status)
data = assert_json(response, ["message"])
assert data["success"] is False
async def test_validation_error(client, endpoint: str, invalid_data: Dict[str, Any]):
"""Test validation error response."""
response = await client.post(endpoint, json=invalid_data)
assert_status(response, 422)
data = assert_json(response, ["errors"])
assert isinstance(data["errors"], (list, dict))
# Mock Utilities
def mock_database():
"""Create mock database connection."""
db_mock = AsyncMock()
db_mock.execute.return_value = None
db_mock.fetch_one.return_value = None
db_mock.fetch_all.return_value = []
db_mock.commit.return_value = None
db_mock.rollback.return_value = None
return db_mock
def patch_request():
"""Create mock request patch."""
request_mock = Mock()
request_mock.method = "GET"
request_mock.url = "http://testserver/test"
request_mock.headers = {}
request_mock.json = {}
return patch("request", request_mock)
def mock_external_api():
"""Mock external API calls."""
api_mock = AsyncMock()
api_mock.get.return_value = {"status": "success", "data": {}}
api_mock.post.return_value = {"status": "success", "id": "123"}
api_mock.put.return_value = {"status": "success"}
api_mock.delete.return_value = {"status": "success"}
return api_mock
# Async Testing Utilities
async def wait_for_condition(condition_func, timeout: float = 5.0, interval: float = 0.1):
"""Wait for condition to become true."""
start_time = asyncio.get_event_loop().time()
while True:
if await condition_func() if asyncio.iscoroutinefunction(condition_func) else condition_func():
return True
if asyncio.get_event_loop().time() - start_time > timeout:
return False
await asyncio.sleep(interval)
async def assert_eventually(assertion_func, timeout: float = 5.0, interval: float = 0.1):
"""Assert condition eventually becomes true."""
success = await wait_for_condition(assertion_func, timeout, interval)
if not success:
pytest.fail(f"Condition did not become true within {timeout} seconds")
# Skip conditions and retry mechanisms
def skip_condition(condition: bool, reason: str):
"""Skip test based on condition."""
return pytest.mark.skipif(condition, reason=reason)
def retry_on_failure(max_retries: int = 3, delay: float = 1.0):
"""Retry test on failure."""
def decorator(func):
async def wrapper(*args, **kwargs):
last_exception = None
for attempt in range(max_retries):
try:
return await func(*args, **kwargs)
except Exception as e:
last_exception = e
if attempt < max_retries - 1:
await asyncio.sleep(delay)
continue
raise last_exception
return wrapper
return decorator
`;
}
generatePyprojectConfig() {
return `[tool.pytest.ini_options]
# Pytest configuration for comprehensive testing
minversion = "7.0"
python_files = ["test_*.py", "*_test.py", "tests.py"]
python_classes = ["Test*", "*Test", "*Tests"]
python_functions = ["test_*"]
testpaths = ["tests", "test"]
addopts = [
"-ra",
"--strict-markers",
"--strict-config",
"--cov=app",
"--cov=src",
"--cov=api",
"--cov=models",
"--cov=schemas",
"--cov=routers",
"--cov=blueprints",
"--cov=handlers",
"--cov=middleware",
"--cov=core",
"--cov-report=term-missing",
"--cov-report=html:htmlcov",
"--cov-report=xml:coverage.xml",
"--cov-report=json:coverage.json",
"--cov-fail-under=85",
"--tb=short",
"--disable-warnings",
"-p no:warnings",
"--maxfail=5",
"--verbose",
]
# Async testing configuration
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
# Test markers for categorization
markers = [
"unit: Unit tests that test individual components in isolation",
"integration: Integration tests that test component interactions",
"e2e: End-to-end tests that test complete user workflows",
"smoke: Smoke tests for basic functionality verification",
"regression: Regression tests for bug prevention",
"performance: Performance and benchmark tests",
"security: Security-related tests",
"slow: Tests that take a long time to run (deselect with '-m \"not slow\"')",
"fast: Quick tests that should always pass",
"database: Tests that require database access",
"network: Tests that require network access",
"auth: Authentication and authorization tests",
"api: API endpoint tests",
"models: Database model tests",
"views: View/handler tests",
"forms: Form validation tests",
"utils: Utility function tests",
"mock: Tests using mocks/stubs",
"parametrized: Parametrized tests with multiple inputs",
"fixture: Tests demonstrating fixture usage",
]
# Test discovery patterns
norecursedirs = [
".*",
"build",
"dist",
"*.egg",
"venv",
".venv",
"env",
".env",
"__pycache__",
".git",
".tox",
"node_modules",
"migrations",
"static",
"media",
"htmlcov",
".coverage*",
]
# Filter warnings
filterwarnings = [
"error",
"ignore::UserWarning",
"ignore::DeprecationWarning",
"ignore::PendingDeprecationWarning",
"ignore::ImportWarning",
"ignore::ResourceWarning",
"ignore:.*unclosed.*:ResourceWarning",
"ignore:.*aiohttp.*:DeprecationWarning",
"ignore:.*django.*:DeprecationWarning",
"ignore:.*flask.*:DeprecationWarning",
"ignore:.*tornado.*:DeprecationWarning",
"ignore:.*sanic.*:DeprecationWarning",
]
# Logging configuration for tests
log_auto_indent = true
log_cli = false
log_cli_level = "INFO"
log_cli_format = "%(asctime)s [%(levelname)8s] %(name)s: %(message)s"
log_cli_date_format = "%Y-%m-%d %H:%M:%S"
log_file = "tests/logs/pytest.log"
log_file_level = "DEBUG"
log_file_format = "%(asctime)s [%(levelname)8s] %(filename)s:%(lineno)d %(funcName)s(): %(message)s"
log_file_date_format = "%Y-%m-%d %H:%M:%S"
# Timeout configuration
timeout = 300
timeout_method = "thread"
[tool.coverage.run]
# Coverage configuration
source = ["app", "src", "api", "models", "schemas", "routers", "blueprints", "handlers", "middleware", "core"]
branch = true
parallel = true
concurrency = ["thread", "multiprocessing"]
omit = [
"*/migrations/*",
"*/tests/*",
"*/test/*",
"*/.venv/*",
"*/venv/*",
"*/env/*",
"*/.env/*",
"*/node_modules/*",
"*/static/*",
"*/media/*",
"*/htmlcov/*",
"*/__pycache__/*",
"*/.*",
"setup.py",
"manage.py",
"wsgi.py",
"asgi.py",
"conftest.py",
"*/conftest.py",
"*/settings/*",
"*/config/*",
]
# Include/exclude patterns
include = ["*/app/*", "*/src/*", "*/api/*"]
[tool.coverage.report]
# Coverage reporting
exclude_lines = [
"pragma: no cover",
"pragma: nocover",
"def __repr__",
"def __str__",
"if self.debug:",
"if settings.DEBUG",
"if DEBUG:",
"raise AssertionError",
"raise NotImplementedError",
"if 0:",
"if False:",
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod",
"@overload",
"except ImportError:",
"except ModuleNotFoundError:",
"pass",
"\\.\\.\\.",
]
show_missing = true
skip_covered = false
skip_empty = false
sort = "Cover"
precision = 2
[tool.coverage.html]
# HTML coverage reports
directory = "htmlcov"
title = "Test Coverage Report"
show_contexts = true
[tool.coverage.xml]
# XML coverage reports
output = "coverage.xml"
[tool.coverage.json]
# JSON coverage reports
output = "coverage.json"
show_contexts = true
[tool.pytest-benchmark]
# Benchmark configuration
only_run_marked = false
sort = "mean"
histogram = true
json = "benchmark.json"
compare_fail = ["min:10%", "max:10%", "mean:10%", "stddev:10%"]
timer = "time.perf_counter"
disable_gc = true
warmup = true
warmup_iterations = 100000
calibration_precision = 10
max_time = 1.0
min_rounds = 5
min_time = 0.000005
group_by = "group"
[tool.pytest-xdist]
# Parallel testing configuration
auto = true
dist = "worksteal"
tx = ["popen//python=python3.11"]
rsyncdir = ["app", "src", "tests"]
# Dependencies for testing
[build-system]
requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"]
[tool.pytest.dependencies]
test = [
"pytest>=7.0.0",
"pytest-asyncio>=0.21.0",
"pytest-cov>=4.0.0",
"pytest-mock>=3.10.0",
"pytest-xdist>=3.0.0",
"pytest-benchmark>=4.0.0",
"pytest-timeout>=2.1.0",
"pytest-html>=3.1.0",
"coverage>=7.0.0",
"httpx>=0.24.0",
"aiohttp>=3.8.0",
"psutil>=5.9.0",
"PyJWT>=2.6.0",
]
`;
}
generateBaseConftest() {
return `"""
Base conftest.py for all Python framework templates
Provides common fixtures, utilities, and test configuration
"""
import asyncio
import os
import pytest
import tempfile
import shutil
from pathlib import Path
from typing import Any, AsyncGenerator, Dict, Generator, List, Optional
from unittest.mock import AsyncMock, MagicMock, Mock, patch
import json
import uuid
from datetime import datetime, timedelta
# Import testing utilities
from .test_utilities import (
generate_fake_data, random_test_data, test_factory,
cleanup_database, seed_test_data, reset_database, rollback_transaction,
assert_status, assert_json, assert_headers, assert_content,
create_test_user, generate_test_token, authenticate_test_user, logout_test_user,
create_test_file, upload_test_file, cleanup_test_files,
time_execution, assert_performance, memory_usage,
validate_schema, validate_response, validate_model,
assert_raises, test_error_response, test_validation_error,
mock_database, patch_request, mock_external_api
)
# Test configuration
pytest_plugins = [
"pytest_asyncio",
"pytest_mock",
"pytest_cov",
"pytest_benchmark",
"pytest_xdist",
"pytest_timeout",
"pytest_html",
]
# Environment setup
os.environ.setdefault("TESTING", "1")
os.environ.setdefault("TEST_DATABASE_URL", "sqlite:///:memory:")
os.environ.setdefault("TEST_REDIS_URL", "redis://localhost:6379/15")
@pytest.fixture(scope="session")
def event_loop():
"""Create an instance of the default event loop for the test session."""
loop = asyncio.new_event_loop()
yield loop
loop.close()
@pytest.fixture(scope="session")
def test_config() -> Dict[str, Any]:
"""Test configuration fixture."""
return {
"DEBUG": True,
"TESTING": True,
"SECRET_KEY": "test-secret-key-not-for-production",
"DATABASE_URL": "sqlite:///:memory:",
"REDIS_URL": "redis://localhost:6379/15",
"JWT_SECRET_KEY": "test-jwt-secret",
"JWT_ALGORITHM": "HS256",
"JWT_EXPIRATION_DELTA": timedelta(hours=1),
"UPLOAD_FOLDER": "/tmp/test_uploads",
"MAX_CONTENT_LENGTH": 16 * 1024 * 1024, # 16MB
"CORS_ORIGINS": ["http://localhost:3000"],
"RATE_LIMIT": "100/minute",
"LOG_LEVEL": "DEBUG",
}
@pytest.fixture(scope="function")
def temp_dir() -> Generator[Path, None, None]:
"""Create a temporary directory for tests."""
temp_path = Path(tempfile.mkdtemp())
yield temp_path
shutil.rmtree(temp_path, ignore_errors=True)
@pytest.fixture(scope="function")
def temp_file() -> Generator[Path, None, None]:
"""Create a temporary file for tests."""
fd, temp_path = tempfile.mkstemp()
os.close(fd)
path = Path(temp_path)
yield path
if path.exists():
path.unlink()
@pytest.fixture(scope="function")
def mock_uuid():
"""Mock UUID generation for consistent testing."""
test_uuid = "550e8400-e29b-41d4-a716-446655440000"
with patch("uuid.uuid4", return_value=uuid.UUID(test_uuid)):
yield test_uuid
@pytest.fixture(scope="function")
def mock_datetime():
"""Mock datetime for consistent testing."""
fixed_time = datetime(2024, 1, 1, 12, 0, 0)
with patch("datetime.datetime") as mock_dt:
mock_dt.now.return_value = fixed_time
mock_dt.utcnow.return_value = fixed_time
mock_dt.side_effect = lambda *args, **kw: datetime(*args, **kw)
yield fixed_time
@pytest.fixture(scope="function")
def sample_user_data() -> Dict[str, Any]:
"""Sample user data for testing."""
return {
"id": "550e8400-e29b-41d4-a716-446655440000",
"username": "testuser",
"email": "test@example.com",
"first_name": "Test",
"last_name": "User",
"is_active": True,
"is_verified": True,
"roles": ["user"],
"permissions": ["read", "write"],
"created_at": "2024-01-01T12:00:00Z",
"updated_at": "2024-01-01T12:00:00Z",
}
@pytest.fixture(scope="function")
def sample_post_data() -> Dict[str, Any]:
"""Sample blog post data for testing."""
return {
"id": "550e8400-e29b-41d4-a716-446655440001",
"title": "Test Blog Post",
"content": "This is a test blog post content.",
"slug": "test-blog-post",
"status": "published",
"author_id": "550e8400-e29b-41d4-a716-446655440000",
"category_id": "550e8400-e29b-41d4-a716-446655440002",
"tags": ["test", "blog", "python"],
"meta_title": "Test Blog Post - SEO Title",
"meta_description": "Test blog post for SEO testing",
"featured_image": "https://example.com/image.jpg",
"published_at": "2024-01-01T12:00:00Z",
"created_at": "2024-01-01T12:00:00Z",
"updated_at": "2024-01-01T12:00:00Z",
}
@pytest.fixture(scope="function")
def sample_api_response() -> Dict[str, Any]:
"""Sample API response data for testing."""
return {
"success": True,
"data": {"message": "Operation successful"},
"message": "Request completed successfully",
"timestamp": "2024-01-01T12:00:00Z",
"request_id": "550e8400-e29b-41d4-a716-446655440003",
}
@pytest.fixture(scope="function")
def sample_pagination() -> Dict[str, Any]:
"""Sample pagination data for testing."""
return {
"page": 1,
"limit": 20,
"total": 100,
"pages": 5,
"has_next": True,
"has_prev": False,
}
@pytest.fixture(scope="function")
def mock_redis():
"""Mock Redis client for testing."""
redis_mock = AsyncMock()
redis_mock.get.return_value = None
redis_mock.set.return_value = True
redis_mock.delete.return_value = 1
redis_mock.exists.return_value = True
redis_mock.expire.return_value = True
redis_mock.ttl.return_value = 3600
redis_mock.keys.return_value = []
redis_mock.flushdb.return_value = True
return redis_mock
@pytest.fixture(scope="function")
def mock_database():
"""Mock database connection for testing."""
db_mock = AsyncMock()
db_mock.execute.return_value = None
db_mock.fetch_one.return_value = None
db_mock.fetch_all.return_value = []
db_mock.begin.return_value = AsyncMock()
db_mock.commit.return_value = None
db_mock.rollback.return_value = None
db_mock.close.return_value = None
return db_mock
@pytest.fixture(scope="function")
def mock_file_upload():
"""Mock file upload for testing."""
file_mock = Mock()
file_mock.filename = "test_file.txt"
file_mock.content_type = "text/plain"
file_mock.size = 1024
file_mock.read.return_value = b"test file content"
file_mock.seek.return_value = None
return file_mock
@pytest.fixture(scope="function")
def mock_email_service():
"""Mock email service for testing."""
email_mock = AsyncMock()
email_mock.send_email.return_value = True
email_mock.send_verification_email.return_value = True
email_mock.send_password_reset_email.return_value = True
email_mock.send_notification_email.return_value = True
return email_mock
@pytest.fixture(scope="function")
def mock_auth_service():
"""Mock authentication service for testing."""
auth_mock = AsyncMock()
auth_mock.authenticate.return_value = True
auth_mock.generate_token.return_value = "test-jwt-token"
auth_mock.verify_token.return_value = {"user_id": "550e8400-e29b-41d4-a716-446655440000"}
auth_mock.hash_password.return_value = "hashed_password"
auth_mock.verify_password.return_value = True
return auth_mock
@pytest.fixture(scope="function")
def mock_storage_service():
"""Mock storage service for testing."""
storage_mock = AsyncMock()
storage_mock.upload_file.return_value = "https://example.com/uploaded-file.jpg"
storage_mock.delete_file.return_value = True
storage_mock.get_file_url.return_value = "https://example.com/file.jpg"
storage_mock.file_exists.return_value = True
return storage_mock
@pytest.fixture(scope="function")
def mock_cache_service():
"""Mock cache service for testing."""
cache_mock = AsyncMock()
cache_mock.get.return_value = None
cache_mock.set.return_value = True
cache_mock.delete.return_value = True
cache_mock.clear.return_value = True
cache_mock.exists.return_value = False
return cache_mock
@pytest.fixture(scope="function")
def mock_task_queue():
"""Mock task queue for testing."""
queue_mock = AsyncMock()
queue_mock.enqueue.return_value = "task-id-123"
queue_mock.get_task_status.return_value = "completed"
queue_mock.get_task_result.return_value = {"status": "success"}
return queue_mock
@pytest.fixture(scope="function")
def database():
"""Database fixture for testing."""
return mock_database()
@pytest.fixture(scope="function")
def mock_db():
"""Mock database utility fixture."""
db_mock = AsyncMock()
db_mock.execute.return_value = None
db_mock.fetch_one.return_value = None
db_mock.fetch_all.return_value = []
db_mock.commit.return_value = None
db_mock.rollback.return_value = None
db_mock.close.return_value = None
return db_mock
@pytest.fixture(scope="function")
async def async_database():
"""Async database fixture for testing."""
db_mock = AsyncMock()
db_mock.execute.return_value = None
db_mock.fetch_one.return_value = None
db_mock.fetch_all.return_value = []
db_mock.begin.return_value = AsyncMock()
db_mock.commit.return_value = None
db_mock.rollback.return_value = None
db_mock.close.return_value = None
yield db_mock
await db_mock.close()
@pytest.fixture(scope="function")
def client():
"""Generic test client fixture."""
class MockClient:
def __init__(self):
self.headers = {}
def get(self, url, **kwargs):
return Mock(status_code=200, json=lambda: {"success": True})
def post(self, url, **kwargs):
return Mock(status_code=201, json=lambda: {"success": True})
def put(self, url, **kwargs):
return Mock(status_code=200, json=lambda: {"success": True})
def delete(self, url, **kwargs):
return Mock(status_code=204)
return MockClient()
@pytest.fixture(scope="function")
async def async_client():
"""Generic async test client fixture."""
class MockAsyncClient:
def __init__(self):
self.headers = {}
async def get(self, url, **kwargs):
return Mock(status_code=200, json=lambda: {"success": True})
async def post(self, url, **kwargs):
return Mock(status_code=201, json=lambda: {"success": True})
async def put(self, url, **kwargs):
return Mock(status_code=200, json=lambda: {"success": True})
async def delete(self, url, **kwargs):
return Mock(status_code=204)
yield MockAsyncClient()
@pytest.fixture(scope="function")
def mock_user():
"""Mock user fixture."""
return {
"id": "550e8400-e29b-41d4-a716-446655440000",
"username": "testuser",
"email": "test@example.com",
"first_name": "Test",
"last_name": "User",
"is_active": True,
"roles": ["user"],
}
@pytest.fixture(scope="function")
def auth_headers(mock_user):
"""Authentication headers fixture."""
import jwt
import time
payload = {
"user_id": mock_user["id"],
"username": mock_user["username"],
"exp": int(time.time()) + 3600
}
token = jwt.encode(payload, "test-secret", algorithm="HS256")
return {"Authorization": f"Bearer {token}"}
@pytest.fixture(scope="function")
def sanic_client():
"""Sanic test client fixture."""
from sanic_testing import TestClient
from sanic import Sanic
app = Sanic("test_app")
return TestClient(app)
@pytest.fixture(scope="function")
def tornado_app():
"""Tornado app fixture."""
from tornado.web import Application
return Application([])
@pytest.fixture(scope="function")
def tornado_server():
"""Tornado server fixture."""
from tornado.testing import AsyncHTTPTestCase
class TornadoTestCase(AsyncHTTPTestCase):
def get_app(self):
from tornado.web import Application
return Application([])
test_case = TornadoTestCase()
test_case.setUp()
yield test_case
test_case.tearDown()
# Skip conditions
def test_skip_conditions():
"""Test skip conditions."""
return pytest.mark.skipif(True, reason="Conditional skip example")
# Conditional test execution
def test_conditional_execution():
"""Test conditional execution."""
import sys
return pytest.mark.skipif(sys.version_info < (3, 9), reason="Requires Python 3.9+")
# Test retry mechanism
@pytest.fixture(scope="function")
def test_retry():
"""Test retry mechanism."""
def retry_test(max_attempts=3):
def decorator(func):
def wrapper(*args, **kwargs):
last_exception = None
for attempt in range(max_attempts):
try:
return func(*args, **kwargs)
except Exception as e:
last_exception = e
if attempt < max_attempts - 1:
continue
raise
raise last_exception
return wrapper
return decorator
return retry_test
# Async test utilities
async def async_test_wrapper(coro):
"""Wrapper for async test functions."""
return await coro
def pytest_configure(config):
"""Pytest configuration hook."""
# Add custom markers
config.addinivalue_line("markers", "slow: mark test as slow running")
config.addinivalue_line("markers", "fast: mark test as fast running")
config.addinivalue_line("markers", "integration: mark test as integration test")
config.addinivalue_line("markers", "unit: mark test as unit test")
config.addinivalue_line("markers", "e2e: mark test as end-to-end test")
config.addinivalue_line("markers", "api: mark test as API test")
config.addinivalue_line("markers", "database: mark test as requiring database")
config.addinivalue_line("markers", "auth: mark test as authentication test")
config.addinivalue_line("markers", "performance: mark test as performance test")
config.addinivalue_line("markers", "security: mark test as security test")
def pytest_collection_modifyitems(config, items):
"""Modify collected test items."""
# Add slow marker to tests that take more than 1 second
for item in items:
if "slow" not in item.keywords and any(
keyword in item.name.lower()
for keyword in ["integration", "e2e", "database", "network"]
):
item.add_marker(pytest.mark.slow)
# Add fast marker to unit tests
if "unit" in item.keywords or "test_unit" in item.name:
item.add_marker(pytest.mark.fast)
@pytest.fixture(scope="function")
def assert_all_responses_recorded():
"""Ensure all HTTP responses are recorded in tests."""
def _assert_all_responses_recorded(responses):
assert len(responses.calls) > 0, "No HTTP calls were made"
for call in responses.calls:
assert call.response is not None, f"No response for {call.request.url}"
return _assert_all_responses_recorded
@pytest.fixture(scope="function")
def benchmark_config():
"""Configuration for benchmark tests."""
return {
"min_rounds": 3,
"max_time": 1.0,
"min_time": 0.000005,
"timer": "time.perf_counter",
"disable_gc": True,
"warmup": True,
}
class TestClient:
"""Base test client for framework testing."""
def __init__(self, app, base_url: str = "http://testserver"):
self.app = app
self.base_url = base_url
self.session = None
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if self.session:
await self.session.close()
def set_auth_header(self, token: str):
"""Set authorization header for requests."""
if not hasattr(self, '_headers'):
self._headers = {}
self._headers['Authorization'] = f'Bearer {token}'
def clear_auth_header(self):
"""Clear authorization header."""
if hasattr(self, '_headers') and 'Authorization' in self._headers:
del self._headers['Authorization']
# Parametrized test data generators
@pytest.fixture(scope="session")
def user_test_cases():
    """Username/email combinations paired with their expected validity."""
    valid = [
        {"username": "validuser", "email": "valid@example.com", "valid": True},
        {"username": "test123", "email": "test123@domain.co", "valid": True},
        {"username": "user_name", "email": "user.name@test.org", "valid": True},
    ]
    invalid = [
        {"username": "", "email": "invalid@example.com", "valid": False},
        {"username": "validuser", "email": "invalid-email", "valid": False},
        # Username below the 3-character minimum.
        {"username": "ab", "email": "valid@example.com", "valid": False},
        # Username above the 50-character maximum.
        {"username": "a" * 51, "email": "valid@example.com", "valid": False},
    ]
    return valid + invalid
@pytest.fixture(scope="session")
def api_error_test_cases():
    """HTTP error cases with the response fields each one must expose."""
    cases = [
        (400, "ValidationError", ["message", "errors"]),
        (401, "AuthenticationError", ["message"]),
        (403, "AuthorizationError", ["message"]),
        (404, "NotFoundError", ["message"]),
        (422, "ValidationError", ["message", "errors"]),
        (500, "InternalServerError", ["message"]),
    ]
    return [
        {"status_code": code, "error_type": kind, "expected_fields": fields}
        for code, kind, fields in cases
    ]
@pytest.fixture(scope="session")
def pagination_test_cases():
    """Page/limit/total combinations with the expected page count."""
    cases = [
        (1, 10, 100, 10),
        (1, 20, 100, 5),
        (2, 25, 100, 4),
        (1, 50, 30, 1),
        (1, 10, 0, 0),
    ]
    return [
        {"page": page, "limit": limit, "total": total, "expected_pages": pages}
        for page, limit, total, pages in cases
    ]
# Performance testing utilities
@pytest.fixture(scope="function")
def performance_monitor():
    """Provide a start/stop monitor for duration, memory delta and CPU usage."""
    import psutil
    import time

    class PerformanceMonitor:
        def __init__(self):
            self.start_time = None
            self.start_memory = None
            self.start_cpu = None

        def start(self):
            """Record baseline timestamp, memory usage and CPU load."""
            self.start_time = time.perf_counter()
            self.start_memory = psutil.virtual_memory().used
            self.start_cpu = psutil.cpu_percent()

        def stop(self):
            """Return metrics accumulated since start(); raise if never started."""
            if self.start_time is None:
                raise ValueError("Monitor not started")
            end_time = time.perf_counter()
            end_memory = psutil.virtual_memory().used
            end_cpu = psutil.cpu_percent()
            return {
                "duration": end_time - self.start_time,
                "memory_delta": end_memory - self.start_memory,
                # Crude average of CPU load sampled at start and stop.
                "cpu_usage": (self.start_cpu + end_cpu) / 2,
            }

    return PerformanceMonitor()
# Database test utilities
@pytest.fixture(scope="function")
def db_transaction_rollback():
    """Track transactions opened during a test; roll them back on teardown."""
    opened = []

    def start_transaction(db):
        tx = db.begin()
        opened.append(tx)
        return tx

    yield start_transaction

    # Undo in reverse order; a transaction the test already closed is ignored.
    for tx in reversed(opened):
        try:
            tx.rollback()
        except Exception:
            pass  # Transaction might already be closed
# Security testing utilities
@pytest.fixture(scope="function")
def security_headers_check():
    """Fixture returning a callable that audits common security headers."""
    def _check_security_headers(response):
        expected = [
            "X-Content-Type-Options",
            "X-Frame-Options",
            "X-XSS-Protection",
            "Strict-Transport-Security",
            "Content-Security-Policy",
        ]
        present = [name for name in expected if name in response.headers]
        missing = [name for name in expected if name not in response.headers]
        return {
            "has_all_security_headers": len(missing) == 0,
            "missing_headers": missing,
            "present_headers": present,
        }
    return _check_security_headers
# Async testing helpers
async def wait_for_condition(condition_func, timeout: float = 5.0, interval: float = 0.1):
    """Poll *condition_func* every *interval* seconds until it is truthy.

    Accepts both plain callables and coroutine functions. Returns True as
    soon as the condition holds, or False once *timeout* seconds elapse.
    The condition is always evaluated at least once.
    """
    # get_running_loop() is the supported call inside a coroutine;
    # get_event_loop() is deprecated in this context since Python 3.10.
    loop = asyncio.get_running_loop()
    deadline = loop.time() + timeout
    while True:
        result = condition_func()
        # Transparently await coroutine results (covers coroutine functions
        # as well as plain callables that return a coroutine).
        if asyncio.iscoroutine(result):
            result = await result
        if result:
            return True
        if loop.time() > deadline:
            return False
        await asyncio.sleep(interval)
async def assert_eventually(assertion_func, timeout: float = 5.0, interval: float = 0.1):
    """Raise AssertionError unless *assertion_func* turns truthy within *timeout*."""
    if not await wait_for_condition(assertion_func, timeout, interval):
        raise AssertionError(f"Condition did not become true within {timeout} seconds")
`;
}
generateFastAPITestConfig() {
return `"""
FastAPI-specific test configuration and fixtures
"""
import pytest
import asyncio
from fastapi import FastAPI
from fastapi.testclient import TestClient
from httpx import AsyncClient
from typing import AsyncGenerator, Generator
from unittest.mock import AsyncMock, patch
# FastAPI test dependencies
@pytest.fixture(scope="session")
def fastapi_app() -> FastAPI:
    """Build the application under test with a clean dependency-override map."""
    from app.main import create_app

    application = create_app()
    # Start every session from a blank override table.
    application.dependency_overrides = {}
    return application
@pytest.fixture(scope="function")
def test_client(fastapi_app: FastAPI) -> Generator[TestClient, None, None]:
    """Synchronous client bound to the test application."""
    with TestClient(fastapi_app) as sync_client:
        yield sync_client


@pytest.fixture(scope="function")
async def async_client(fastapi_app: FastAPI) -> AsyncGenerator[AsyncClient, None]:
    """Asynchronous httpx client driving the app in-process."""
    async with AsyncClient(app=fastapi_app, base_url="http://testserver") as client:
        yield client
@pytest.fixture(scope="function")
def override_dependencies(fastapi_app: FastAPI):
    """Yield a setter for dependency overrides; all overrides are cleared on teardown."""
    def _override_dependency(dependency, override):
        fastapi_app.dependency_overrides[dependency] = override

    yield _override_dependency

    # Make sure one test's overrides never leak into the next.
    fastapi_app.dependency_overrides.clear()
@pytest.fixture(scope="function")
def mock_current_user():
    """A plain active user carrying the 'user' role."""
    from app.models.user import User

    return User(
        id="550e8400-e29b-41d4-a716-446655440000",
        username="testuser",
        email="test@example.com",
        is_active=True,
        roles=["user"],
    )


@pytest.fixture(scope="function")
def mock_admin_user():
    """An active user carrying both the 'admin' and 'user' roles."""
    from app.models.user import User

    return User(
        id="550e8400-e29b-41d4-a716-446655440001",
        username="admin",
        email="admin@example.com",
        is_active=True,
        roles=["admin", "user"],
    )
@pytest.fixture(scope="function")
def authenticated_client(test_client: TestClient, mock_current_user):
    """Sync client pre-loaded with a bearer token for the mock user."""
    from app.core.auth import create_access_token

    token = create_access_token(data={"sub": mock_current_user.username})
    test_client.headers.update({"Authorization": f"Bearer {token}"})
    return test_client


@pytest.fixture(scope="function")
async def authenticated_async_client(async_client: AsyncClient, mock_current_user):
    """Async client pre-loaded with a bearer token for the mock user."""
    from app.core.auth import create_access_token

    token = create_access_token(data={"sub": mock_current_user.username})
    async_client.headers.update({"Authorization": f"Bearer {token}"})
    return async_client
@pytest.fixture(scope="function")
def mock_database_session():
    """AsyncMock standing in for an async SQLAlchemy-style session."""
    session = AsyncMock()
    # Lifecycle methods succeed silently, like a healthy real session.
    for method_name in ("add", "commit", "rollback", "refresh", "close"):
        getattr(session, method_name).return_value = None
    session.execute.return_value = AsyncMock()
    session.scalar.return_value = None
    return session
# FastAPI-specific test utilities
class FastAPITestUtils:
"""Utilities for FastAPI testing."""
@staticmethod
def assert_api_response(response, expected_status: int = 200):
"""Assert API response structure."""
assert response.status_code == expected_status
if response.headers.get("content-type", "").startswith("application/json"):
data = response.json()
assert "success" in data
assert "timestamp" in data
if expected_status < 400:
assert data["success"] is True
assert "data" in data
else:
assert data["success"] is False
assert "message" in data
@staticmethod
def assert_validation_error(response, field_name: str = None):
"""Assert validation error response."""
assert response.status_code == 422
data = response.json()
assert "detail" in data
if field_name:
errors = data["detail"]
field_errors = [error for error in errors if error["loc"][-1] == field_name]
assert len(field_errors) > 0, f"No validation error for field '{field_name}'"
@staticmethod
def assert_authentication_required(response):
"""Assert authentication required response."""
assert response.status_code == 401
data = response.json()
assert data["success"] is False
assert "authentication" in data["message"].lower()
@staticmethod
def assert_authorization_failed(response):
"""Assert authorization failed response."""
assert response.status_code == 403
data = response.json()
assert data["success"] is False
assert "permission" in data["message"].lower() or "forbidden" in data["message"].lower()
@staticmethod
def assert_not_found(response):
"""Assert not found response."""
assert response.status_code == 404
data = response.json()
assert data["success"] is False
assert "not found" in data["message"].lower()
@pytest.fixture(scope="function")
def fastapi_utils():
    """Expose the FastAPI assertion helpers as a fixture."""
    return FastAPITestUtils()


# Background task testing
@pytest.fixture(scope="function")
def mock_background_tasks():
    """AsyncMock conforming to the BackgroundTasks interface."""
    from fastapi import BackgroundTasks

    tasks = AsyncMock(spec=BackgroundTasks)
    tasks.add_task.return_value = None
    return tasks
# WebSocket testing
@pytest.fixture(scope="function")
def websocket_client(fastapi_app: FastAPI):
    """Plain TestClient used for opening WebSocket connections."""
    from fastapi.testclient import TestClient

    return TestClient(fastapi_app)


@pytest.fixture(scope="function")
async def websocket_connection(websocket_client):
    """Yield an open WebSocket connection to the /ws endpoint."""
    with websocket_client.websocket_connect("/ws") as websocket:
        yield websocket
# Dependency injection testing
@pytest.fixture(scope="function")
def dependency_overrides():
    """Small registry for collecting dependency overrides during a test."""
    registry = {}

    # set: record an override; get: return the backing dict; clear: wipe it.
    return {
        "set": lambda dependency, override: registry.__setitem__(dependency, override),
        "get": lambda: registry,
        "clear": registry.clear,
    }
# File upload testing
@pytest.fixture(scope="function")
def test_upload_file():
    """In-memory text file plus its raw bytes for upload endpoints."""
    import io

    raw = b"This is a test file content for upload testing."
    buffer = io.BytesIO(raw)
    buffer.name = "test_file.txt"
    return {
        "file": ("test_file.txt", buffer, "text/plain"),
        "content": raw,
    }
# API versioning testing
@pytest.fixture(scope="function")
def versioned_client(test_client: TestClient):
    """Factory that stamps an API-Version header onto the shared client."""
    def _versioned_client(version: str = "v1"):
        test_client.headers.update({"API-Version": version})
        return test_client

    return _versioned_client


# Rate limiting testing
@pytest.fixture(scope="function")
def mock_rate_limiter():
    """Permissive AsyncMock rate limiter: always allows, 99 remaining, 1h reset."""
    limiter = AsyncMock()
    limiter.is_allowed.return_value = True
    limiter.get_remaining.return_value = 99
    limiter.get_reset_time.return_value = 3600
    return limiter
# Caching testing
@pytest.fixture(scope="function")
def mock_cache():
"""Mock cache for FastAPI testing."""
cache_mock = AsyncMock()
cache_mock.get.return_value = None
cache_mock.set.return_value = True
cache_mock.delete.return_val