-
Notifications
You must be signed in to change notification settings - Fork 0
Open
Labels
enhancement — New feature or request
Description
Implement 100% Test Coverage
🎯 Objective
Achieve 100% test coverage for the pytryfi library to ensure reliability, catch regressions early, and maintain code quality. This includes unit tests for all modules, integration tests for API interactions, and edge case testing.
📋 Background
Currently, pytryfi lacks comprehensive test coverage. Adding thorough tests will:
- Prevent regressions when adding new features
- Enable confident refactoring
- Serve as documentation for expected behavior
- Improve overall code quality
🔧 Implementation Plan
1. Set Up Testing Infrastructure
Create pytest.ini
[tool:pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
# Multiline values in ini files must be indented continuation lines;
# the flattened form would not be parsed as part of addopts.
addopts =
    --cov=pytryfi
    --cov-report=html
    --cov-report=term-missing
    --cov-fail-under=100
    -v
Create .coveragerc
[run]
source = pytryfi
# Indented continuation lines are required for multi-value keys.
omit =
    */tests/*
    */__pycache__/*
    */test_*

[report]
exclude_lines =
    pragma: no cover
    def __repr__
    raise AssertionError
    raise NotImplementedError
    if __name__ == .__main__.:
    if TYPE_CHECKING:
Update requirements-test.txt
pytest>=7.4.0
pytest-cov>=4.1.0
pytest-asyncio>=0.21.0
pytest-mock>=3.11.0
responses>=0.23.0
freezegun>=1.2.0
2. Create Test Structure
tests/
├── __init__.py
├── conftest.py # Shared fixtures
├── test_pytryfi.py # Main API tests
├── test_fi_pet.py # Pet class tests
├── test_fi_device.py # Device class tests
├── test_fi_base.py # Base station tests
├── test_fi_user.py # User class tests
├── test_query.py # GraphQL query tests
├── test_exceptions.py # Exception handling tests
├── test_led_colors.py # LED color enum tests
├── fixtures/ # Sample API responses
│ ├── login_response.json
│ ├── household_response.json
│ ├── pet_response.json
│ └── error_responses.json
└── integration/ # Integration tests
├── test_api_flow.py
└── test_error_scenarios.py
3. Create Comprehensive Fixtures
conftest.py
"""Shared test fixtures."""
import json
import pytest
from datetime import datetime
from unittest.mock import Mock, patch
import responses
@pytest.fixture
def mock_session():
    """Stand-in for a requests session with stubbed post/get and empty headers."""
    fake = Mock()
    fake.post = Mock()
    fake.get = Mock()
    fake.headers = {}
    return fake
@pytest.fixture
def sample_login_response():
    """Canned payload mimicking a successful TryFi login reply."""
    return {
        "userId": "user123",
        "sessionId": "session123",
        "email": "test@example.com",
    }
@pytest.fixture
def sample_pet_data():
    """Representative pet payload mirroring the TryFi GraphQL schema."""
    # Assemble the nested collar-device record first for readability.
    device = {
        "id": "device123",
        "moduleId": "module123",
        "info": {
            "buildId": "1.0.0",
            "batteryPercent": 75,
            "isCharging": False,
        },
        "operationParams": {
            "ledEnabled": True,
            "ledOffAt": None,
            "mode": "NORMAL",
        },
        "ledColor": {
            "name": "BLUE",
            "hexCode": "#0000FF",
        },
        "lastConnectionState": {
            "date": "2024-01-01T12:00:00Z",
            "__typename": "ConnectedToCellular",
            "signalStrengthPercent": 85,
        },
        "nextLocationUpdateExpectedBy": "2024-01-01T13:00:00Z",
        "availableLedColors": [
            {"ledColorCode": "1", "hexCode": "#FF00FF", "name": "MAGENTA"},
            {"ledColorCode": "2", "hexCode": "#0000FF", "name": "BLUE"},
        ],
    }
    return {
        "id": "pet123",
        "name": "Max",
        "breed": {"name": "Golden Retriever"},
        "gender": "MALE",
        "weight": 70,
        "yearOfBirth": 2020,
        "monthOfBirth": 3,
        "dayOfBirth": 15,
        "photos": {"first": {"image": {"fullSize": "https://example.com/photo.jpg"}}},
        "device": device,
    }
@pytest.fixture
def sample_base_data():
    """Representative base-station payload."""
    return {
        "baseId": "base123",
        "name": "Living Room",
        "online": True,
        "onlineQuality": {"chargingBase": "GOOD"},
        "lastSeenAt": "2024-01-01T12:00:00Z",
        "position": {"latitude": 40.7128, "longitude": -74.0060},
    }
@pytest.fixture
def sample_household_response(sample_pet_data, sample_base_data):
    """Sample household API response wrapping one pet and one base.

    Fix: the pet/base fixtures must be requested as parameters rather than
    called directly -- calling a @pytest.fixture-decorated function raises
    "Fixtures are not meant to be called directly" in modern pytest.
    """
    return {
        "data": {
            "getCurrentUserHouseholds": [
                {
                    "household": {
                        "pets": [sample_pet_data],
                        "bases": [sample_base_data],
                    }
                }
            ]
        }
    }
4. Test Main PyTryFi Class
test_pytryfi.py
"""Tests for main PyTryFi class."""
import pytest
from unittest.mock import Mock, patch, call
from pytryfi import PyTryFi
from pytryfi.exceptions import TryFiAuthError, TryFiConnectionError
class TestPyTryFi:
    """Unit tests for the top-level PyTryFi client."""

    def test_init_success(self, mock_session, sample_login_response, sample_household_response):
        """A valid login plus household payload yields a populated client."""
        households = sample_household_response["data"]["getCurrentUserHouseholds"]
        with patch('pytryfi.requests.Session', return_value=mock_session), \
                patch('pytryfi.query.login', return_value=sample_login_response), \
                patch('pytryfi.query.getHouseHolds', return_value=households):
            client = PyTryFi("test@example.com", "password")
            assert client.username == "test@example.com"
            assert client.userId == "user123"
            assert len(client.pets) == 1
            assert len(client.bases) == 1

    def test_init_auth_failure(self, mock_session):
        """Bad credentials surface as an exception from the constructor."""
        mock_session.post.side_effect = Exception("401 Unauthorized")
        with patch('pytryfi.requests.Session', return_value=mock_session):
            with pytest.raises(Exception):
                PyTryFi("test@example.com", "wrongpassword")

    def test_init_no_pets(self, mock_session, sample_login_response):
        """A household with no pets or bases yields empty collections."""
        no_members = [{"household": {"pets": [], "bases": []}}]
        with patch('pytryfi.requests.Session', return_value=mock_session), \
                patch('pytryfi.query.login', return_value=sample_login_response), \
                patch('pytryfi.query.getHouseHolds', return_value=no_members):
            client = PyTryFi("test@example.com", "password")
            assert len(client.pets) == 0
            assert len(client.bases) == 0

    def test_update_pets(self, mock_session):
        """updatePets refreshes every tracked pet with the active session."""
        client = Mock()
        client._pets = [Mock(), Mock()]
        client._session = mock_session
        PyTryFi.updatePets(client)
        for tracked in client._pets:
            tracked.updateAllDetails.assert_called_once_with(mock_session)

    def test_get_pet_by_id(self):
        """getPet returns the matching pet, or None when no id matches."""
        client = Mock()
        first = Mock(petId="pet1")
        second = Mock(petId="pet2")
        client._pets = [first, second]
        assert PyTryFi.getPet(client, "pet2") == second
        assert PyTryFi.getPet(client, "nonexistent") is None

    def test_update_with_errors(self, mock_session):
        """update keeps refreshing pets even when the base refresh fails."""
        client = Mock()
        client._pets = [Mock()]
        client._bases = [Mock()]
        client.updateBases = Mock(side_effect=Exception("Base update failed"))
        client.updatePets = Mock()
        PyTryFi.update(client)
        # Pets must still be updated despite the base failure.
        client.updatePets.assert_called_once()
5. Test Individual Classes
test_fi_pet.py
"""Tests for FiPet class."""
import pytest
from datetime import datetime
from unittest.mock import Mock, patch
from pytryfi.fiPet import FiPet
class TestFiPet:
    """Unit tests for the FiPet model."""

    def test_init(self):
        """A freshly constructed pet carries only its id."""
        fido = FiPet("pet123")
        assert fido.petId == "pet123"
        assert fido._name is None
        assert fido._device is None

    def test_set_pet_details(self, sample_pet_data):
        """A full JSON payload populates every pet attribute."""
        fido = FiPet("pet123")
        fido.setPetDetailsJSON(sample_pet_data)
        assert fido.name == "Max"
        assert fido.breed == "Golden Retriever"
        assert fido.gender == "MALE"
        assert fido.weight == 70
        assert fido.yearOfBirth == 2020
        assert fido.device is not None
        assert fido.device.deviceId == "device123"

    def test_set_pet_details_missing_data(self):
        """Absent optional fields are left as None rather than raising."""
        partial = {"name": "Max", "device": {"id": "device123"}}
        fido = FiPet("pet123")
        fido.setPetDetailsJSON(partial)
        assert fido.name == "Max"
        assert fido.breed is None
        assert fido.weight is None

    def test_current_location(self):
        """setCurrentLocation unpacks position, place, and activity info."""
        fido = FiPet("pet123")
        report = {
            "__typename": "Rest",
            "areaName": "Home",
            "lastReportTimestamp": "2024-01-01T12:00:00Z",
            "position": {"latitude": 40.7128, "longitude": -74.0060},
            "place": {"name": "Home", "address": "123 Main St"},
            "start": "2024-01-01T11:00:00Z",
        }
        fido.setCurrentLocation(report)
        assert fido.activityType == "Rest"
        assert fido.areaName == "Home"
        assert fido.currLatitude == 40.7128
        assert fido.currLongitude == -74.0060
        assert fido.currPlaceName == "Home"
        assert fido.currPlaceAddress == "123 Main St"

    def test_stats_update(self, mock_session):
        """updateStats copies daily/weekly/monthly step counters."""
        fido = FiPet("pet123")
        counters = {
            "dailyStat": {"stepGoal": 5000, "totalSteps": 3000, "totalDistance": 2000},
            "weeklyStat": {"stepGoal": 35000, "totalSteps": 21000, "totalDistance": 14000},
            "monthlyStat": {"stepGoal": 150000, "totalSteps": 90000, "totalDistance": 60000},
        }
        with patch('pytryfi.query.getCurrentPetStats', return_value=counters):
            assert fido.updateStats(mock_session) is True
            assert fido.dailySteps == 3000
            assert fido.dailyGoal == 5000
            assert fido.weeklySteps == 21000
            assert fido.monthlySteps == 90000

    def test_led_control(self, mock_session):
        """LED on/off/color helpers report success on a good API reply."""
        fido = FiPet("pet123")
        fido._device = Mock(moduleId="module123")
        led_reply = {"setDeviceLed": {}}
        with patch('pytryfi.query.turnOnOffLed', return_value=led_reply):
            assert fido.turnOnLed(mock_session) is True
        with patch('pytryfi.query.turnOnOffLed', return_value=led_reply):
            assert fido.turnOffLed(mock_session) is True
        with patch('pytryfi.query.setLedColor', return_value=led_reply):
            assert fido.setLedColorCode(mock_session, 3) is True

    def test_lost_mode(self, mock_session):
        """Lost-dog mode can be toggled in both directions."""
        fido = FiPet("pet123")
        mode_reply = {"setPetMode": {}}
        # Exercise both the enable and disable paths with the same stub.
        for action in ("ENABLE", "DISABLE"):
            with patch('pytryfi.query.setLostDogMode', return_value=mode_reply):
                assert fido.setLostDogMode(mock_session, action) is True
6. Test Error Scenarios
test_error_scenarios.py
"""Test error handling scenarios."""
import pytest
import requests
from unittest.mock import Mock, patch
from pytryfi import PyTryFi
from pytryfi.exceptions import TryFiError
class TestErrorScenarios:
    """Exercise failure paths during client construction."""

    @staticmethod
    def _ok_response(json_value=None, json_error=None):
        """Build a Mock HTTP 200 response whose .json() returns json_value,
        or raises json_error when one is supplied."""
        stub = Mock()
        stub.status_code = 200
        stub.ok = True
        if json_error is not None:
            stub.json.side_effect = json_error
        else:
            stub.json.return_value = json_value
        return stub

    def test_network_timeout(self):
        """A network timeout during login propagates as an exception."""
        with patch('requests.Session.post', side_effect=requests.Timeout):
            with pytest.raises(Exception):
                PyTryFi("test@example.com", "password")

    def test_invalid_json_response(self, mock_session):
        """A 200 response whose body is not JSON raises during init."""
        mock_session.post.return_value = self._ok_response(
            json_error=ValueError("Invalid JSON"))
        with patch('pytryfi.requests.Session', return_value=mock_session):
            with pytest.raises(Exception):
                PyTryFi("test@example.com", "password")

    def test_api_error_response(self, mock_session):
        """A structured API error payload raises during init."""
        payload = {"error": {"message": "Invalid credentials"}}
        mock_session.post.return_value = self._ok_response(json_value=payload)
        with patch('pytryfi.requests.Session', return_value=mock_session):
            with pytest.raises(Exception):
                PyTryFi("test@example.com", "password")

    def test_missing_required_fields(self, mock_session):
        """A login reply lacking userId raises during init."""
        mock_session.post.return_value = self._ok_response(
            json_value={"someField": "value"})
        with patch('pytryfi.requests.Session', return_value=mock_session):
            with pytest.raises(Exception):
                PyTryFi("test@example.com", "password")

    def test_pet_without_collar(self, mock_session, sample_login_response):
        """Pets reported without a collar device are skipped entirely."""
        households = [{
            "household": {
                # NOTE(review): the string "None" (not the None object) is how
                # the original encoded "no collar" -- confirm against the API.
                "pets": [{"id": "pet123", "name": "Max", "device": "None"}],
                "bases": [],
            }
        }]
        with patch('pytryfi.requests.Session', return_value=mock_session), \
                patch('pytryfi.query.login', return_value=sample_login_response), \
                patch('pytryfi.query.getHouseHolds', return_value=households):
            client = PyTryFi("test@example.com", "password")
            assert len(client.pets) == 0
7. Integration Tests
test_integration.py
"""Integration tests for complete flows."""
import pytest
from unittest.mock import patch
import responses
from pytryfi import PyTryFi
class TestIntegration:
"""Test complete integration flows."""
@responses.activate
def test_complete_initialization_flow(self):
"""Test complete initialization flow with mocked HTTP responses."""
# Mock login endpoint
responses.add(
responses.POST,
"https://api.tryfi.com/login",
json={"userId": "user123", "sessionId": "session123"},
status=200
)
# Mock GraphQL endpoint
responses.add(
responses.POST,
"https://api.tryfi.com/graphql",
json={
"data": {
"getCurrentUserHouseholds": [{
"household": {
"pets": [...], # Full pet data
"bases": [...] # Full base data
}
}]
}
},
status=200
)
# Initialize API
api = PyTryFi("test@example.com", "password")
# Verify initialization
assert api.userId == "user123"
assert len(responses.calls) >= 2 # Login + household query
def test_full_update_cycle(self):
"""Test a complete update cycle."""
# Create initialized API instance
api = Mock()
# ... setup mocks ...
# Run update
api.update()
# Verify all components updated
# ... assertions ...
8. Coverage Report Script
scripts/coverage.sh
#!/bin/bash
# Run the test suite with coverage, open the HTML report, and enforce
# the 100% coverage threshold.
set -euo pipefail  # Fix: abort immediately if pytest itself fails

echo "Running tests with coverage..."
pytest --cov=pytryfi --cov-report=html --cov-report=term-missing

echo "Opening coverage report..."
# Fix: 'open' is macOS-only; fall back to xdg-open on Linux, else skip.
if command -v open >/dev/null 2>&1; then
    open htmlcov/index.html
elif command -v xdg-open >/dev/null 2>&1; then
    xdg-open htmlcov/index.html
fi

# Check if we meet the coverage threshold (guarded so set -e doesn't
# pre-empt the friendly failure message).
if coverage report --fail-under=100; then
    echo "✅ Coverage goal met!"
else
    echo "❌ Coverage below 100%"
    exit 1
fi
📝 Benefits
- Reliability: Catch bugs before they reach production
- Confidence: Refactor without fear of breaking things
- Documentation: Tests serve as usage examples
- Quality: Maintain high code standards
- CI/CD: Enable automated testing in pipelines
🧪 Testing Strategy
- Unit Tests: Test each class/method in isolation
- Integration Tests: Test component interactions
- Error Tests: Test all error paths
- Edge Cases: Test boundary conditions
- Mocking: Mock external dependencies (API calls)
📋 Checklist
- Set up testing infrastructure
- Create test directory structure
- Write comprehensive fixtures
- Test PyTryFi main class (100%)
- Test FiPet class (100%)
- Test FiDevice class (100%)
- Test FiBase class (100%)
- Test FiUser class (100%)
- Test query module (100%)
- Test exceptions module (100%)
- Test LED colors enum (100%)
- Test error scenarios
- Write integration tests
- Add GitHub Actions workflow
- Document testing approach
- Add coverage badge to README
📊 Success Metrics
- 100% line coverage
- 100% branch coverage
- All edge cases tested
- All error paths tested
- Fast test execution (<30 seconds)
- Clear test names and documentation
🏷️ Labels
testing
, quality
, enhancement
Metadata
Metadata
Assignees
Labels
enhancement — New feature or request