Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -108,3 +108,20 @@ venv.bak/

# VIM temp files.
*.swp

# Claude settings
.claude/*

# IDE files
.idea/
.vscode/
*.sublime-project
*.sublime-workspace

# Testing artifacts
test_output.txt
test_result.json
.test_cache/

# Poetry - DO NOT ignore lock file
# poetry.lock is intentionally tracked
1,282 changes: 1,282 additions & 0 deletions poetry.lock

Large diffs are not rendered by default.

101 changes: 101 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
[tool.poetry]
name = "python-project"
version = "0.1.0"
description = "Python project with testing infrastructure"
authors = ["Project Authors"]
readme = "README.md"
packages = [{include = "*", from = "."}]

[tool.poetry.dependencies]
python = "^3.8"
dateparser = "0.7.0"
flask = "1.0.2"
google-cloud-pubsub = "0.30.1"
google-cloud-storage = "1.10.0"
protobuf = "3.6.1"
pytz = "2017.3"
tabulate = "0.8.2"

[tool.poetry.group.dev.dependencies]
pytest = "^7.4.0"
pytest-cov = "^4.1.0"
pytest-mock = "^3.11.0"

[tool.poetry.scripts]
test = "pytest:main"
tests = "pytest:main"

[tool.pytest.ini_options]
minversion = "7.0"
addopts = [
"-ra",
"--strict-markers",
"--strict-config",
"--cov=.",
"--cov-branch",
"--cov-report=term-missing:skip-covered",
"--cov-report=html:htmlcov",
"--cov-report=xml:coverage.xml",
"--cov-fail-under=0",
"-vv"
]
testpaths = ["tests"]
python_files = ["test_*.py", "*_test.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
markers = [
"unit: marks tests as unit tests (fast, isolated)",
"integration: marks tests as integration tests (may require external resources)",
"slow: marks tests as slow running"
]
filterwarnings = [
"error",
"ignore::UserWarning",
"ignore::DeprecationWarning"
]

[tool.coverage.run]
source = ["."]
omit = [
"*/tests/*",
"*/__pycache__/*",
"*/venv/*",
"*/virtualenv/*",
"*/.venv/*",
"*/site-packages/*",
"*/dist-packages/*",
"setup.py",
"*/migrations/*",
"*/__init__.py",
"*/conftest.py"
]
branch = true

[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"def __repr__",
"if self.debug:",
"if settings.DEBUG",
"raise AssertionError",
"raise NotImplementedError",
"if 0:",
"if __name__ == .__main__.:",
"if TYPE_CHECKING:",
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod"
]
precision = 2
show_missing = true
skip_covered = false
fail_under = 0

[tool.coverage.html]
directory = "htmlcov"

[tool.coverage.xml]
output = "coverage.xml"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
Empty file added tests/__init__.py
Empty file.
114 changes: 114 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
import os
import shutil
import tempfile
from pathlib import Path
from typing import Generator, Dict, Any

import pytest


@pytest.fixture
def temp_dir() -> Generator[Path, None, None]:
    """Yield a freshly created scratch directory; remove it after the test."""
    scratch = Path(tempfile.mkdtemp())
    yield scratch
    # Teardown: best-effort removal so a locked file never fails the suite.
    shutil.rmtree(scratch, ignore_errors=True)


@pytest.fixture
def temp_file(temp_dir: Path) -> Generator[Path, None, None]:
    """Yield an empty file named test_file.txt inside the scratch directory.

    No explicit teardown is needed: the file lives inside ``temp_dir``,
    which is removed by that fixture's own cleanup.
    """
    path = temp_dir / "test_file.txt"
    path.touch()
    yield path


@pytest.fixture
def mock_config() -> Dict[str, Any]:
    """Return a canned configuration mapping suitable for unit tests."""
    settings = dict(
        debug=True,
        database_url="sqlite:///:memory:",
        api_key="test-api-key",
        timeout=30,
        retries=3,
        environment="test",
    )
    return settings


@pytest.fixture
def sample_data() -> Dict[str, Any]:
    """Return a small fixed dataset (users and products) for tests."""
    users = [
        {"id": 1, "name": "Alice", "email": "alice@example.com"},
        {"id": 2, "name": "Bob", "email": "bob@example.com"},
    ]
    products = [
        {"id": 1, "name": "Product A", "price": 10.99},
        {"id": 2, "name": "Product B", "price": 20.50},
    ]
    return {"users": users, "products": products}


@pytest.fixture
def mock_env_vars(monkeypatch) -> Dict[str, str]:
    """Install a fixed set of environment variables and return the mapping.

    monkeypatch undoes each setenv automatically at test teardown.
    """
    overrides = {
        "TEST_ENV": "testing",
        "DATABASE_URL": "sqlite:///:memory:",
        "API_KEY": "test-key-123",
        "DEBUG": "true",
    }
    for name in overrides:
        monkeypatch.setenv(name, overrides[name])
    return overrides


@pytest.fixture(autouse=True)
def reset_environment(monkeypatch):
    """Strip any pre-existing TEST_-prefixed env vars so each test starts clean.

    Runs automatically for every test; monkeypatch restores the original
    environment afterwards, so isolation is bidirectional.
    """
    # Snapshot the keys first: deleting while iterating os.environ is unsafe.
    for name in list(os.environ):
        if name.startswith("TEST_"):
            monkeypatch.delenv(name, raising=False)


@pytest.fixture
def mock_http_response():
    """Return a factory class that mimics a requests-style HTTP response."""
    class MockResponse:
        def __init__(self, json_data=None, status_code=200, text=""):
            # Attribute surface mirrors the parts of requests.Response
            # that tests commonly poke at.
            self.json_data = json_data
            self.status_code = status_code
            self.text = text
            self.headers = {"Content-Type": "application/json"}

        def json(self):
            # Deserialized payload supplied at construction time.
            return self.json_data

        def raise_for_status(self):
            # Like requests: any 4xx/5xx status escalates to an exception.
            if self.status_code >= 400:
                raise Exception(f"HTTP Error {self.status_code}")

    return MockResponse


@pytest.fixture
def clean_test_artifacts():
    """After the test runs, delete well-known artifact files/dirs from the CWD."""
    yield
    # Teardown only: remove whatever the test may have left behind.
    for leftover in ("test_output.txt", "test_result.json", ".test_cache"):
        if not os.path.exists(leftover):
            continue
        if os.path.isdir(leftover):
            shutil.rmtree(leftover, ignore_errors=True)
        else:
            os.remove(leftover)
Empty file added tests/integration/__init__.py
Empty file.
123 changes: 123 additions & 0 deletions tests/test_infrastructure_validation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
import sys
import os
from pathlib import Path

import pytest


class TestInfrastructureValidation:
    """Sanity checks proving the test tooling, fixtures and markers all work."""

    def test_pytest_is_installed(self):
        """pytest must already be loaded into this interpreter."""
        assert "pytest" in sys.modules

    def test_pytest_cov_is_installed(self):
        """The coverage plugin must be importable."""
        try:
            import pytest_cov
        except ImportError:
            pytest.fail("pytest-cov is not installed")
        else:
            assert pytest_cov is not None

    def test_pytest_mock_is_installed(self):
        """The mocking plugin must be importable."""
        try:
            import pytest_mock
        except ImportError:
            pytest.fail("pytest-mock is not installed")
        else:
            assert pytest_mock is not None

    def test_test_directory_structure_exists(self):
        """The tests/ package layout (unit + integration) must be in place."""
        test_root = Path(__file__).parent

        assert test_root.exists()
        assert test_root.is_dir()
        # Every expected entry, relative to the tests/ package root.
        expected = (
            "__init__.py",
            "conftest.py",
            "unit",
            "unit/__init__.py",
            "integration",
            "integration/__init__.py",
        )
        for entry in expected:
            assert (test_root / entry).exists()

    @pytest.mark.unit
    def test_unit_marker_works(self):
        """A test carrying the unit marker must be collected without error."""
        assert True

    @pytest.mark.integration
    def test_integration_marker_works(self):
        """A test carrying the integration marker must be collected without error."""
        assert True

    @pytest.mark.slow
    def test_slow_marker_works(self):
        """A test carrying the slow marker must be collected without error."""
        assert True

    def test_temp_dir_fixture(self, temp_dir):
        """temp_dir must yield an existing, writable directory."""
        assert temp_dir.exists()
        assert temp_dir.is_dir()

        # Round-trip a file through the directory.
        probe = temp_dir / "test.txt"
        probe.write_text("test content")

        assert probe.exists()
        assert probe.read_text() == "test content"

    def test_temp_file_fixture(self, temp_file):
        """temp_file must yield an existing regular file we can write to."""
        assert temp_file.exists()
        assert temp_file.is_file()

        # Round-trip some content through it.
        temp_file.write_text("test data")
        assert temp_file.read_text() == "test data"

    def test_mock_config_fixture(self, mock_config):
        """mock_config must expose the expected settings keys."""
        assert isinstance(mock_config, dict)
        for key in ("debug", "database_url", "api_key"):
            assert key in mock_config
        assert mock_config["environment"] == "test"

    def test_sample_data_fixture(self, sample_data):
        """sample_data must carry non-empty users and products collections."""
        assert "users" in sample_data
        assert "products" in sample_data
        assert len(sample_data["users"]) > 0
        assert len(sample_data["products"]) > 0

    def test_mock_env_vars_fixture(self, mock_env_vars):
        """mock_env_vars must have exported every expected variable."""
        expected = {
            "TEST_ENV": "testing",
            "DATABASE_URL": "sqlite:///:memory:",
            "API_KEY": "test-key-123",
            "DEBUG": "true",
        }
        for name, value in expected.items():
            assert os.environ.get(name) == value

    def test_mock_http_response_fixture(self, mock_http_response):
        """The response factory must model both success and error statuses."""
        ok = mock_http_response(
            json_data={"status": "success"},
            status_code=200
        )
        assert ok.status_code == 200
        assert ok.json() == {"status": "success"}
        ok.raise_for_status()  # 2xx: must not raise

        # 4xx: raise_for_status must escalate.
        missing = mock_http_response(status_code=404)
        with pytest.raises(Exception):
            missing.raise_for_status()

    def test_coverage_configuration(self):
        """Placeholder: passes whenever the suite runs under coverage config."""
        assert True
Empty file added tests/unit/__init__.py
Empty file.