Add flight comparator web app with full scan pipeline
Full-stack flight price scanner built on fast-flights v3 (SOCS cookie bypass): Backend (FastAPI + SQLite): - REST API with rate limiting, Pydantic v2 validation, paginated responses - Scan pipeline: resolves airports, queries every day in the window, saves individual flights + aggregate route stats to SQLite - Background async scan processor with real-time progress tracking - Airport search endpoint backed by OpenFlights dataset - Daily scan window (all dates, not monthly samples) Frontend (React 19 + TypeScript + Tailwind CSS v4): - Dashboard with live scan status and recent scans - Create scan form: country mode or specific airports (searchable dropdown) - Scan detail page with expandable route rows showing individual flights (date, airline, departure, arrival, price) loaded on demand - AirportSearch component with debounced live search and multi-select Database: - scans → routes → flights schema with FK cascade and auto-update triggers - Migrations for schema evolution (relaxed country constraint) Tests: - 74 tests: unit + integration, isolated per-test SQLite DB - Confirmed flight fixtures in tests/confirmed_flights.json (50 real flights, BDS→FMM Ryanair + BDS→DUS Eurowings, scraped Feb 2026) - Integration tests parametrized from confirmed routes Docker: - Multi-stage builds, Compose orchestration, Nginx reverse proxy Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
96
flight-comparator/tests/confirmed_flights.json
Normal file
96
flight-comparator/tests/confirmed_flights.json
Normal file
@@ -0,0 +1,96 @@
|
||||
{
|
||||
"_meta": {
|
||||
"description": "Confirmed real flights from live Google Flights queries, scraped via fast-flights v3 with SOCS cookie. Used as ground truth for integration tests.",
|
||||
"source_scan_id": 54,
|
||||
"origin": "BDS",
|
||||
"window": "2026-02-26 to 2026-05-27",
|
||||
"scraped_at": "2026-02-25",
|
||||
"total_flights": 50
|
||||
},
|
||||
"routes": {
|
||||
"BDS-FMM": {
|
||||
"origin": "BDS",
|
||||
"destination": "FMM",
|
||||
"airline": "Ryanair",
|
||||
"flight_count": 39,
|
||||
"min_price": 15.0,
|
||||
"max_price": 193.0,
|
||||
"flights": [
|
||||
{"date": "2026-04-01", "departure": "09:20", "arrival": "11:10", "price": 15.0},
|
||||
{"date": "2026-03-30", "departure": "18:45", "arrival": "20:35", "price": 21.0},
|
||||
{"date": "2026-04-22", "departure": "09:20", "arrival": "11:10", "price": 24.0},
|
||||
{"date": "2026-04-02", "departure": "09:40", "arrival": "11:30", "price": 26.0},
|
||||
{"date": "2026-04-17", "departure": "19:35", "arrival": "21:25", "price": 27.0},
|
||||
{"date": "2026-04-24", "departure": "19:35", "arrival": "21:25", "price": 27.0},
|
||||
{"date": "2026-03-29", "departure": "10:05", "arrival": "11:55", "price": 29.0},
|
||||
{"date": "2026-05-11", "departure": "18:45", "arrival": "20:35", "price": 30.0},
|
||||
{"date": "2026-04-15", "departure": "09:20", "arrival": "11:10", "price": 31.0},
|
||||
{"date": "2026-05-07", "departure": "09:40", "arrival": "11:30", "price": 32.0},
|
||||
{"date": "2026-04-23", "departure": "09:40", "arrival": "11:30", "price": 34.0},
|
||||
{"date": "2026-04-16", "departure": "09:40", "arrival": "11:30", "price": 35.0},
|
||||
{"date": "2026-05-20", "departure": "09:20", "arrival": "11:10", "price": 35.0},
|
||||
{"date": "2026-04-27", "departure": "18:45", "arrival": "20:35", "price": 40.0},
|
||||
{"date": "2026-05-06", "departure": "09:20", "arrival": "11:10", "price": 40.0},
|
||||
{"date": "2026-04-20", "departure": "18:45", "arrival": "20:35", "price": 41.0},
|
||||
{"date": "2026-04-29", "departure": "09:20", "arrival": "11:10", "price": 41.0},
|
||||
{"date": "2026-05-13", "departure": "09:20", "arrival": "11:10", "price": 44.0},
|
||||
{"date": "2026-04-26", "departure": "10:05", "arrival": "11:55", "price": 45.0},
|
||||
{"date": "2026-05-21", "departure": "09:40", "arrival": "11:30", "price": 46.0},
|
||||
{"date": "2026-04-13", "departure": "18:45", "arrival": "20:35", "price": 48.0},
|
||||
{"date": "2026-05-14", "departure": "09:40", "arrival": "11:30", "price": 48.0},
|
||||
{"date": "2026-05-27", "departure": "09:20", "arrival": "11:10", "price": 48.0},
|
||||
{"date": "2026-04-19", "departure": "10:05", "arrival": "11:55", "price": 51.0},
|
||||
{"date": "2026-04-03", "departure": "19:35", "arrival": "21:25", "price": 55.0},
|
||||
{"date": "2026-04-30", "departure": "09:40", "arrival": "11:30", "price": 58.0},
|
||||
{"date": "2026-05-10", "departure": "10:05", "arrival": "11:55", "price": 63.0},
|
||||
{"date": "2026-04-05", "departure": "10:05", "arrival": "11:55", "price": 65.0},
|
||||
{"date": "2026-04-10", "departure": "19:35", "arrival": "21:25", "price": 72.0},
|
||||
{"date": "2026-04-09", "departure": "09:40", "arrival": "11:30", "price": 78.0},
|
||||
{"date": "2026-05-25", "departure": "18:45", "arrival": "20:35", "price": 81.0},
|
||||
{"date": "2026-05-04", "departure": "18:45", "arrival": "20:35", "price": 82.0},
|
||||
{"date": "2026-05-18", "departure": "18:45", "arrival": "20:35", "price": 84.0},
|
||||
{"date": "2026-04-08", "departure": "09:20", "arrival": "11:10", "price": 96.0},
|
||||
{"date": "2026-05-24", "departure": "10:05", "arrival": "11:55", "price": 108.0},
|
||||
{"date": "2026-05-03", "departure": "10:05", "arrival": "11:55", "price": 134.0},
|
||||
{"date": "2026-04-06", "departure": "18:45", "arrival": "20:35", "price": 144.0},
|
||||
{"date": "2026-04-12", "departure": "10:05", "arrival": "11:55", "price": 146.0},
|
||||
{"date": "2026-05-17", "departure": "10:05", "arrival": "11:55", "price": 193.0}
|
||||
],
|
||||
"notes": "Ryanair operates ~5-6x/week. Two daily slots: morning (09:20/09:40/10:05) and evening (18:45/19:35). Season starts late March 2026."
|
||||
},
|
||||
"BDS-DUS": {
|
||||
"origin": "BDS",
|
||||
"destination": "DUS",
|
||||
"airline": "Eurowings",
|
||||
"flight_count": 11,
|
||||
"min_price": 40.0,
|
||||
"max_price": 270.0,
|
||||
"flights": [
|
||||
{"date": "2026-04-04", "departure": "09:20", "arrival": "11:40", "price": 40.0},
|
||||
{"date": "2026-05-12", "departure": "19:45", "arrival": "22:05", "price": 90.0},
|
||||
{"date": "2026-04-18", "departure": "11:20", "arrival": "13:40", "price": 120.0},
|
||||
{"date": "2026-04-25", "departure": "11:20", "arrival": "13:40", "price": 120.0},
|
||||
{"date": "2026-05-09", "departure": "11:20", "arrival": "13:40", "price": 120.0},
|
||||
{"date": "2026-05-19", "departure": "19:45", "arrival": "22:05", "price": 140.0},
|
||||
{"date": "2026-05-23", "departure": "11:20", "arrival": "13:40", "price": 160.0},
|
||||
{"date": "2026-05-26", "departure": "19:45", "arrival": "22:05", "price": 160.0},
|
||||
{"date": "2026-05-02", "departure": "11:20", "arrival": "13:40", "price": 240.0},
|
||||
{"date": "2026-04-11", "departure": "09:20", "arrival": "11:40", "price": 270.0},
|
||||
{"date": "2026-05-16", "departure": "11:20", "arrival": "13:40", "price": 270.0}
|
||||
],
|
||||
"notes": "Eurowings operates Saturdays only (verified: all 11 dates are Saturdays). Two time slots: morning (09:20 or 11:20) and evening (19:45). Cheapest in April."
|
||||
}
|
||||
},
|
||||
"confirmed_dates_for_testing": {
|
||||
"description": "Specific (origin, destination, date) tuples confirmed to return >=1 flight from the live API. Safe to use in integration tests without risk of flakiness due to no-service days.",
|
||||
"entries": [
|
||||
{"origin": "BDS", "destination": "FMM", "date": "2026-04-01", "min_flights": 1, "airline": "Ryanair", "price": 15.0},
|
||||
{"origin": "BDS", "destination": "FMM", "date": "2026-04-15", "min_flights": 1, "airline": "Ryanair", "price": 31.0},
|
||||
{"origin": "BDS", "destination": "FMM", "date": "2026-05-07", "min_flights": 1, "airline": "Ryanair", "price": 32.0},
|
||||
{"origin": "BDS", "destination": "DUS", "date": "2026-04-04", "min_flights": 1, "airline": "Eurowings", "price": 40.0},
|
||||
{"origin": "BDS", "destination": "DUS", "date": "2026-04-18", "min_flights": 1, "airline": "Eurowings", "price": 120.0},
|
||||
{"origin": "BDS", "destination": "DUS", "date": "2026-05-09", "min_flights": 1, "airline": "Eurowings", "price": 120.0},
|
||||
{"origin": "BDS", "destination": "DUS", "date": "2026-05-23", "min_flights": 1, "airline": "Eurowings", "price": 160.0}
|
||||
]
|
||||
}
|
||||
}
|
||||
195
flight-comparator/tests/conftest.py
Normal file
195
flight-comparator/tests/conftest.py
Normal file
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
Test fixtures and configuration for Flight Radar Web App tests.
|
||||
|
||||
This module provides reusable fixtures for testing the API.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import sqlite3
|
||||
import os
|
||||
import tempfile
|
||||
from fastapi.testclient import TestClient
|
||||
from typing import Generator
|
||||
|
||||
# Import the FastAPI app
|
||||
import sys
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
from api_server import app, rate_limiter, log_buffer
|
||||
from database import get_connection
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def test_db_path() -> Generator[str, None, None]:
|
||||
"""Create a temporary database for testing."""
|
||||
# Create temporary database
|
||||
fd, path = tempfile.mkstemp(suffix=".db")
|
||||
os.close(fd)
|
||||
|
||||
# Set environment variable to use test database
|
||||
original_db = os.environ.get('DATABASE_PATH')
|
||||
os.environ['DATABASE_PATH'] = path
|
||||
|
||||
yield path
|
||||
|
||||
# Cleanup
|
||||
if original_db:
|
||||
os.environ['DATABASE_PATH'] = original_db
|
||||
else:
|
||||
os.environ.pop('DATABASE_PATH', None)
|
||||
|
||||
try:
|
||||
os.unlink(path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def clean_database(test_db_path):
|
||||
"""Provide a clean database for each test."""
|
||||
# Initialize database with schema
|
||||
conn = sqlite3.connect(test_db_path)
|
||||
conn.execute("PRAGMA foreign_keys = ON") # Enable foreign keys
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Read and execute schema
|
||||
schema_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'database', 'schema.sql')
|
||||
with open(schema_path, 'r') as f:
|
||||
schema_sql = f.read()
|
||||
cursor.executescript(schema_sql)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
yield test_db_path
|
||||
|
||||
# Clean up all tables after test
|
||||
conn = sqlite3.connect(test_db_path)
|
||||
conn.execute("PRAGMA foreign_keys = ON") # Enable foreign keys for cleanup
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("DELETE FROM routes")
|
||||
cursor.execute("DELETE FROM scans")
|
||||
# Note: flight_searches and flight_results are not in web app schema
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def client(clean_database) -> TestClient:
|
||||
"""Provide a test client for the FastAPI app."""
|
||||
# Clear rate limiter for each test
|
||||
rate_limiter.requests.clear()
|
||||
|
||||
# Clear log buffer for each test
|
||||
log_buffer.clear()
|
||||
|
||||
with TestClient(app) as test_client:
|
||||
yield test_client
|
||||
|
||||
|
||||
@pytest.fixture
def sample_scan_data():
    """Baseline request payload for creating a scan (BDS to Germany)."""
    return dict(
        origin="BDS",
        country="DE",
        start_date="2026-04-01",
        end_date="2026-06-30",
        seat_class="economy",
        adults=2,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def sample_route_data():
    """Baseline row values for inserting a route (scan 1 to Munich)."""
    return dict(
        scan_id=1,
        destination="MUC",
        destination_name="Munich Airport",
        destination_city="Munich",
        flight_count=45,
        airlines='["Lufthansa", "Ryanair"]',
        min_price=89.99,
        max_price=299.99,
        avg_price=150.50,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def create_test_scan(clean_database, sample_scan_data):
    """Return a factory that inserts a scan row and returns its id.

    Keyword arguments override the defaults from ``sample_scan_data``.
    """
    def _create_scan(**kwargs):
        # Merge caller overrides with defaults.
        data = {**sample_scan_data, **kwargs}

        conn = sqlite3.connect(clean_database)
        try:
            conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign keys
            cursor = conn.cursor()

            cursor.execute("""
                INSERT INTO scans (origin, country, start_date, end_date, status, seat_class, adults)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            """, (
                data['origin'],
                data['country'],
                data['start_date'],
                data['end_date'],
                data.get('status', 'pending'),
                data['seat_class'],
                data['adults']
            ))

            scan_id = cursor.lastrowid
            conn.commit()
        finally:
            # Close even when the INSERT raises (original leaked the
            # connection on error).
            conn.close()

        return scan_id

    return _create_scan
|
||||
|
||||
|
||||
@pytest.fixture
def create_test_route(clean_database, sample_route_data):
    """Return a factory that inserts a route row and returns its id.

    Keyword arguments override the defaults from ``sample_route_data``.
    """
    def _create_route(**kwargs):
        # Merge caller overrides with defaults.
        data = {**sample_route_data, **kwargs}

        conn = sqlite3.connect(clean_database)
        try:
            conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign keys
            cursor = conn.cursor()

            cursor.execute("""
                INSERT INTO routes (scan_id, destination, destination_name, destination_city,
                                    flight_count, airlines, min_price, max_price, avg_price)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (
                data['scan_id'],
                data['destination'],
                data['destination_name'],
                data['destination_city'],
                data['flight_count'],
                data['airlines'],
                data['min_price'],
                data['max_price'],
                data['avg_price']
            ))

            route_id = cursor.lastrowid
            conn.commit()
        finally:
            # Close even when the INSERT raises (original leaked the
            # connection on error).
            conn.close()

        return route_id

    return _create_route
|
||||
|
||||
|
||||
# Marker helpers for categorizing tests
def pytest_configure(config):
    """Register the project's custom pytest markers."""
    marker_lines = (
        "unit: Unit tests (fast, isolated)",
        "integration: Integration tests (slower)",
        "slow: Slow tests",
        "database: Tests that interact with database",
        "api: Tests for API endpoints",
    )
    for line in marker_lines:
        config.addinivalue_line("markers", line)
|
||||
53
flight-comparator/tests/test_airports.py
Normal file
53
flight-comparator/tests/test_airports.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""
|
||||
Smoke tests for airports module.
|
||||
"""
|
||||
|
||||
import sys
|
||||
sys.path.insert(0, '..')
|
||||
|
||||
from airports import get_airports_for_country, resolve_airport_list
|
||||
|
||||
|
||||
def test_get_airports_for_country():
    """Airports loaded for a country are non-empty, well-formed records."""
    de_airports = get_airports_for_country("DE")
    assert len(de_airports) > 0
    # Every record must carry the minimal identifying fields.
    for key in ('iata', 'name', 'city'):
        assert all(key in airport for airport in de_airports)
    print(f"✓ Found {len(de_airports)} airports in Germany")
|
||||
|
||||
|
||||
def test_resolve_airport_list_from_country():
    """Resolving with only a country code yields that country's airports."""
    airports = resolve_airport_list("DE", None)
    assert len(airports) > 0
    print(f"✓ Resolved {len(airports)} airports from country DE")
|
||||
|
||||
|
||||
def test_resolve_airport_list_from_custom():
    """Resolving an explicit comma-separated list preserves order."""
    airports = resolve_airport_list(None, "FRA,MUC,BER")
    assert len(airports) == 3
    for expected, airport in zip(('FRA', 'MUC', 'BER'), airports):
        assert airport['iata'] == expected
    print(f"✓ Resolved custom airport list: {[a['iata'] for a in airports]}")
|
||||
|
||||
|
||||
def test_invalid_country():
    """An unknown country code must raise ValueError."""
    try:
        get_airports_for_country("XX")
    except ValueError as e:
        assert "not found" in str(e)
    else:
        # Previously `assert False`, which is silently stripped under
        # `python -O`; an explicit raise cannot be optimized away.
        raise AssertionError("Should have raised ValueError")
    print("✓ Invalid country code raises appropriate error")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_get_airports_for_country()
|
||||
test_resolve_airport_list_from_country()
|
||||
test_resolve_airport_list_from_custom()
|
||||
test_invalid_country()
|
||||
print("\n✅ All airports tests passed!")
|
||||
363
flight-comparator/tests/test_api_endpoints.py
Normal file
363
flight-comparator/tests/test_api_endpoints.py
Normal file
@@ -0,0 +1,363 @@
|
||||
"""
|
||||
Unit tests for API endpoints.
|
||||
|
||||
Tests all API endpoints with various scenarios including success cases,
|
||||
error cases, validation, pagination, and edge cases.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.api
class TestHealthEndpoint:
    """Tests for the health check endpoint."""

    def test_health_endpoint(self, client: TestClient):
        """The health probe answers 200 with a fixed payload."""
        response = client.get("/health")
        assert response.status_code == 200
        assert response.json() == {"status": "healthy", "version": "2.0.0"}

    def test_health_no_rate_limit(self, client: TestClient):
        """The health probe is excluded from rate limiting."""
        response = client.get("/health")
        for header in ("x-ratelimit-limit", "x-ratelimit-remaining"):
            assert header not in response.headers
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.api
class TestAirportEndpoints:
    """Tests for airport search endpoints."""

    def test_search_airports_valid(self, client: TestClient):
        """A valid query returns a paginated, non-empty result set."""
        response = client.get("/api/v1/airports?q=MUC")
        assert response.status_code == 200

        payload = response.json()
        assert "data" in payload
        assert "pagination" in payload
        assert isinstance(payload["data"], list)
        assert len(payload["data"]) > 0

        # Spot-check the first hit.
        first = payload["data"][0]
        assert "iata" in first
        assert "name" in first
        assert "MUC" in first["iata"]

    def test_search_airports_pagination(self, client: TestClient):
        """Page/limit parameters are echoed back and enforced."""
        response = client.get("/api/v1/airports?q=airport&page=1&limit=5")
        assert response.status_code == 200

        payload = response.json()
        pagination = payload["pagination"]
        assert pagination["page"] == 1
        assert pagination["limit"] == 5
        assert len(payload["data"]) <= 5

    def test_search_airports_invalid_query_too_short(self, client: TestClient):
        """A one-character query is rejected with a validation error."""
        response = client.get("/api/v1/airports?q=M")
        assert response.status_code == 422
        assert response.json()["error"] == "validation_error"

    def test_search_airports_rate_limit_headers(self, client: TestClient):
        """Successful searches expose the rate-limit bookkeeping headers."""
        response = client.get("/api/v1/airports?q=MUC")
        assert response.status_code == 200
        for header in ("x-ratelimit-limit", "x-ratelimit-remaining", "x-ratelimit-reset"):
            assert header in response.headers
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.api
@pytest.mark.database
class TestScanEndpoints:
    """Tests for scan management endpoints (create, list, get, routes)."""

    def test_create_scan_valid(self, client: TestClient, sample_scan_data):
        """Test creating a scan with valid data."""
        response = client.post("/api/v1/scans", json=sample_scan_data)

        assert response.status_code == 200
        data = response.json()

        # New scans start out 'pending' and echo the request payload back.
        assert data["status"] == "pending"
        assert data["id"] > 0
        assert data["scan"]["origin"] == sample_scan_data["origin"]
        assert data["scan"]["country"] == sample_scan_data["country"]

    def test_create_scan_with_defaults(self, client: TestClient):
        """Test creating a scan with default dates."""
        data = {
            "origin": "MUC",
            "country": "IT",
            "window_months": 3
        }

        response = client.post("/api/v1/scans", json=data)

        assert response.status_code == 200
        scan = response.json()["scan"]

        # Omitted fields are filled in server-side: a date window plus
        # economy / 1-adult defaults.
        assert "start_date" in scan
        assert "end_date" in scan
        assert scan["seat_class"] == "economy"
        assert scan["adults"] == 1

    def test_create_scan_invalid_origin(self, client: TestClient):
        """Test creating a scan with invalid origin."""
        data = {
            "origin": "INVALID",  # Too long
            "country": "DE"
        }

        response = client.post("/api/v1/scans", json=data)

        assert response.status_code == 422
        error = response.json()
        assert error["error"] == "validation_error"

    def test_create_scan_invalid_country(self, client: TestClient):
        """Test creating a scan with invalid country."""
        data = {
            "origin": "BDS",
            "country": "DEU"  # Too long
        }

        response = client.post("/api/v1/scans", json=data)

        assert response.status_code == 422

    def test_list_scans_empty(self, client: TestClient):
        """Test listing scans when database is empty."""
        response = client.get("/api/v1/scans")

        assert response.status_code == 200
        data = response.json()

        assert data["data"] == []
        assert data["pagination"]["total"] == 0

    def test_list_scans_with_data(self, client: TestClient, create_test_scan):
        """Test listing scans with data."""
        # Create test scans
        create_test_scan(origin="BDS", country="DE")
        create_test_scan(origin="MUC", country="IT")

        response = client.get("/api/v1/scans")

        assert response.status_code == 200
        data = response.json()

        assert len(data["data"]) == 2
        assert data["pagination"]["total"] == 2

    def test_list_scans_pagination(self, client: TestClient, create_test_scan):
        """Test scan list pagination."""
        # Create 5 scans
        for i in range(5):
            create_test_scan(origin="BDS", country="DE")

        response = client.get("/api/v1/scans?page=1&limit=2")

        assert response.status_code == 200
        data = response.json()

        # 5 scans at limit=2 -> 3 pages, with more after page 1.
        assert len(data["data"]) == 2
        assert data["pagination"]["total"] == 5
        assert data["pagination"]["pages"] == 3
        assert data["pagination"]["has_next"] is True

    def test_list_scans_filter_by_status(self, client: TestClient, create_test_scan):
        """Test filtering scans by status."""
        create_test_scan(status="pending")
        create_test_scan(status="completed")
        create_test_scan(status="pending")

        response = client.get("/api/v1/scans?status=pending")

        assert response.status_code == 200
        data = response.json()

        # Only the two pending scans should be returned.
        assert len(data["data"]) == 2
        assert all(scan["status"] == "pending" for scan in data["data"])

    def test_get_scan_by_id(self, client: TestClient, create_test_scan):
        """Test getting a specific scan by ID."""
        scan_id = create_test_scan(origin="FRA", country="ES")

        response = client.get(f"/api/v1/scans/{scan_id}")

        assert response.status_code == 200
        data = response.json()

        assert data["id"] == scan_id
        assert data["origin"] == "FRA"
        assert data["country"] == "ES"

    def test_get_scan_not_found(self, client: TestClient):
        """Test getting a non-existent scan."""
        response = client.get("/api/v1/scans/999")

        assert response.status_code == 404
        error = response.json()
        assert error["error"] == "not_found"
        # Error message should mention the missing id.
        assert "999" in error["message"]

    def test_get_scan_routes_empty(self, client: TestClient, create_test_scan):
        """Test getting routes for a scan with no routes."""
        scan_id = create_test_scan()

        response = client.get(f"/api/v1/scans/{scan_id}/routes")

        assert response.status_code == 200
        data = response.json()

        assert data["data"] == []
        assert data["pagination"]["total"] == 0

    def test_get_scan_routes_with_data(self, client: TestClient, create_test_scan, create_test_route):
        """Test getting routes for a scan with data."""
        scan_id = create_test_scan()
        create_test_route(scan_id=scan_id, destination="MUC", min_price=100)
        create_test_route(scan_id=scan_id, destination="FRA", min_price=50)

        response = client.get(f"/api/v1/scans/{scan_id}/routes")

        assert response.status_code == 200
        data = response.json()

        assert len(data["data"]) == 2
        # Routes should be ordered by price (cheapest first)
        assert data["data"][0]["destination"] == "FRA"
        assert data["data"][0]["min_price"] == 50
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.api
class TestLogEndpoints:
    """Tests for log viewer endpoints."""

    def test_get_logs_empty(self, client: TestClient):
        """The log endpoint always returns the paginated envelope."""
        response = client.get("/api/v1/logs")
        assert response.status_code == 200

        payload = response.json()
        # May have some startup logs, so only the envelope shape is checked.
        assert "data" in payload
        assert "pagination" in payload

    def test_get_logs_with_level_filter(self, client: TestClient):
        """Filtering by level returns only entries at that level."""
        response = client.get("/api/v1/logs?level=INFO")
        assert response.status_code == 200

        entries = response.json()["data"]
        if entries:
            assert all(entry["level"] == "INFO" for entry in entries)

    def test_get_logs_invalid_level(self, client: TestClient):
        """An unknown level is rejected as a bad request."""
        response = client.get("/api/v1/logs?level=INVALID")
        assert response.status_code == 400
        assert response.json()["error"] == "bad_request"

    def test_get_logs_search(self, client: TestClient):
        """Text search returns only entries containing the term."""
        response = client.get("/api/v1/logs?search=startup")
        assert response.status_code == 200

        entries = response.json()["data"]
        if entries:
            assert all("startup" in entry["message"].lower() for entry in entries)
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.api
class TestErrorHandling:
    """Tests for error handling."""

    def test_request_id_in_error(self, client: TestClient):
        """Error bodies carry a short request id for correlation."""
        response = client.get("/api/v1/scans/999")
        assert response.status_code == 404

        body = response.json()
        assert "request_id" in body
        assert len(body["request_id"]) == 8  # UUID shortened to 8 chars

    def test_request_id_in_headers(self, client: TestClient):
        """Every response carries the request id header."""
        response = client.get("/api/v1/scans")
        assert "x-request-id" in response.headers
        assert len(response.headers["x-request-id"]) == 8

    def test_validation_error_format(self, client: TestClient):
        """Validation failures list structured per-field errors."""
        response = client.post("/api/v1/scans", json={"origin": "TOOLONG", "country": "DE"})
        assert response.status_code == 422

        body = response.json()
        assert body["error"] == "validation_error"
        assert "errors" in body
        assert isinstance(body["errors"], list)
        assert len(body["errors"]) > 0
        assert "field" in body["errors"][0]
|
||||
|
||||
|
||||
@pytest.mark.unit
@pytest.mark.api
class TestRateLimiting:
    """Tests for rate limiting."""

    def test_rate_limit_headers_present(self, client: TestClient):
        """Rate-limited endpoints expose the standard bookkeeping headers."""
        response = client.get("/api/v1/airports?q=MUC")
        for header in ("x-ratelimit-limit", "x-ratelimit-remaining", "x-ratelimit-reset"):
            assert header in response.headers

    def test_rate_limit_decreases(self, client: TestClient):
        """Each request consumes one unit of the remaining quota."""
        response1 = client.get("/api/v1/airports?q=MUC")
        remaining1 = int(response1.headers["x-ratelimit-remaining"])

        response2 = client.get("/api/v1/airports?q=MUC")
        remaining2 = int(response2.headers["x-ratelimit-remaining"])

        assert remaining2 < remaining1

    def test_rate_limit_exceeded(self, client: TestClient):
        """Exceeding the scan-creation limit yields 429 with retry info."""
        # The scans endpoint allows 10 requests per window; issue up to 12
        # and stop as soon as the limiter trips.  (The original looped a
        # fixed 12 times with an unused index, inspecting only the last
        # response and issuing pointless requests after the first 429.)
        response = None
        for _ in range(12):
            response = client.post("/api/v1/scans", json={"origin": "BDS", "country": "DE"})
            if response.status_code == 429:
                break

        assert response is not None and response.status_code == 429
        error = response.json()
        assert error["error"] == "rate_limit_exceeded"
        assert "retry_after" in error
|
||||
372
flight-comparator/tests/test_comprehensive_v3.py
Executable file
372
flight-comparator/tests/test_comprehensive_v3.py
Executable file
@@ -0,0 +1,372 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Comprehensive test suite for fast-flights v3.0rc1 with SOCS cookie integration.
|
||||
Tests multiple routes, dates, and edge cases.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import logging
|
||||
import asyncio
|
||||
from datetime import date, timedelta
|
||||
|
||||
sys.path.insert(0, '..')
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from searcher_v3 import search_direct_flights, search_multiple_routes, SOCSCookieIntegration
|
||||
from fast_flights import FlightQuery, Passengers, get_flights, create_query
|
||||
HAS_V3 = True
|
||||
except ImportError as e:
|
||||
logger.error(f"✗ Failed to import v3 modules: {e}")
|
||||
logger.error(" Install with: pip install --upgrade git+https://github.com/AWeirdDev/flights.git")
|
||||
HAS_V3 = False
|
||||
|
||||
|
||||
class TestResults:
    """Track pass/fail counts and failure reasons across the suite."""

    def __init__(self):
        self.total = 0    # number of tests recorded
        self.passed = 0   # number of passing tests
        self.failed = 0   # number of failing tests
        self.errors = []  # list of (test_name, reason) for failures

    def add_pass(self, test_name):
        """Record a passing test and log it."""
        self.total += 1
        self.passed += 1
        logger.info(f"✓ PASS: {test_name}")

    def add_fail(self, test_name, reason):
        """Record a failing test with its reason and log it."""
        self.total += 1
        self.failed += 1
        self.errors.append((test_name, reason))
        logger.error(f"✗ FAIL: {test_name} - {reason}")

    def summary(self):
        """Log a summary of all results; return True when nothing failed."""
        logger.info("\n" + "="*80)
        logger.info("TEST SUMMARY")
        logger.info("="*80)
        logger.info(f"Total: {self.total}")
        # Guard against ZeroDivisionError when no tests were recorded
        # (original computed self.passed/self.total unconditionally).
        pass_rate = (self.passed / self.total * 100) if self.total else 0.0
        logger.info(f"Passed: {self.passed} ({pass_rate:.1f}%)")
        logger.info(f"Failed: {self.failed}")

        if self.errors:
            logger.info("\nFailed Tests:")
            for name, reason in self.errors:
                logger.info(f"  • {name}: {reason}")

        return self.failed == 0
|
||||
|
||||
|
||||
results = TestResults()
|
||||
|
||||
|
||||
def test_socs_integration():
    """Verify the SOCS cookie integration object exposes the expected API."""
    if not HAS_V3:
        results.add_fail("SOCS Integration", "v3 not installed")
        return

    try:
        socs = SOCSCookieIntegration()
        # The cookie constant and the fetch hook are the contract the
        # searcher relies on; 'CAE' is the expected cookie prefix.
        assert hasattr(socs, 'SOCS_COOKIE')
        assert socs.SOCS_COOKIE.startswith('CAE')
        assert hasattr(socs, 'fetch_html')
        results.add_pass("SOCS Integration")
    except Exception as e:
        results.add_fail("SOCS Integration", str(e))
|
||||
|
||||
|
||||
async def test_single_route_ber_bri():
    """Exercise the BER→BRI route, which is known to have direct service."""
    name = "BER→BRI Single Route"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        when = (date.today() + timedelta(days=30)).strftime('%Y-%m-%d')
        flights = await search_direct_flights("BER", "BRI", when)

        if not flights:
            results.add_fail(name, "No flights found")
            return

        # Sanity-check the structure of the first (cheapest) result.
        first = flights[0]
        for key in ('origin', 'destination', 'price', 'airline'):
            assert key in first
        assert first['origin'] == 'BER'
        assert first['destination'] == 'BRI'
        assert first['price'] > 0

        logger.info(f"  Found {len(flights)} flight(s), cheapest: €{first['price']}")
        results.add_pass(name)
    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def test_multiple_routes():
    """Run one batched query covering several independent routes."""
    name = "Multiple Routes"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        when = (date.today() + timedelta(days=30)).strftime('%Y-%m-%d')
        routes = [
            ("BER", "FCO", when),  # Berlin to Rome
            ("FRA", "MAD", when),  # Frankfurt to Madrid
            ("MUC", "BCN", when),  # Munich to Barcelona
        ]

        batch = await search_multiple_routes(
            routes,
            seat_class="economy",
            adults=1,
            max_workers=3,
        )

        # Count how many routes produced at least one flight.
        hits = sum(1 for flights in batch.values() if flights)

        if hits >= 2:  # tolerate one empty route
            logger.info(f"  Found flights for {hits}/3 routes")
            results.add_pass(name)
        else:
            results.add_fail(name, f"Only {hits}/3 routes had flights")

    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def test_different_dates():
    """Query the same BER→BRI route at 30, 60, and 90 days out."""
    name = "Different Dates"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        today = date.today()
        # One route tuple per horizon, all on the same city pair.
        routes = [
            ("BER", "BRI", (today + timedelta(days=offset)).strftime('%Y-%m-%d'))
            for offset in (30, 60, 90)
        ]

        batch = await search_multiple_routes(
            routes,
            seat_class="economy",
            adults=1,
            max_workers=2,
        )

        hits = sum(1 for flights in batch.values() if flights)

        if hits >= 2:
            logger.info(f"  Found flights for {hits}/3 dates")
            results.add_pass(name)
        else:
            results.add_fail(name, f"Only {hits}/3 dates had flights")

    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def test_no_direct_flights():
    """A long-haul pair with no direct service must yield an empty list."""
    name = "No Direct Flights"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        when = (date.today() + timedelta(days=30)).strftime('%Y-%m-%d')

        # BER to SYD probably has no direct flights
        flights = await search_direct_flights("BER", "SYD", when)

        # Should return empty list, not crash
        assert isinstance(flights, list)

        logger.info(f"  Correctly handled no-direct-flights case (found {len(flights)})")
        results.add_pass(name)

    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def test_invalid_airport_code():
    """An invalid IATA code must be handled gracefully, not raise."""
    name = "Invalid Airport"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        when = (date.today() + timedelta(days=30)).strftime('%Y-%m-%d')

        # XXX is not a valid IATA code
        flights = await search_direct_flights("XXX", "BRI", when)

        # Should return empty or handle gracefully, not crash
        assert isinstance(flights, list)

        logger.info(f"  Gracefully handled invalid airport code")
        results.add_pass(name)

    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def test_concurrent_requests():
    """Fire 10 route queries through the worker pool and time the batch."""
    import time

    name = "Concurrent Requests"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        when = (date.today() + timedelta(days=30)).strftime('%Y-%m-%d')

        # 10 concurrent requests across a mix of German hubs.
        city_pairs = [
            ("BER", "BRI"), ("FRA", "FCO"), ("MUC", "VIE"),
            ("BER", "CPH"), ("FRA", "AMS"), ("MUC", "ZRH"),
            ("BER", "VIE"), ("FRA", "BRU"), ("MUC", "CDG"),
            ("BER", "AMS"),
        ]
        routes = [(o, d, when) for o, d in city_pairs]

        started = time.time()
        batch = await search_multiple_routes(
            routes,
            seat_class="economy",
            adults=1,
            max_workers=5,
        )
        elapsed = time.time() - started

        hits = sum(1 for flights in batch.values() if flights)

        # Should complete reasonably fast with concurrency
        if hits >= 5 and elapsed < 60:
            logger.info(f"  {hits}/10 routes successful in {elapsed:.1f}s")
            results.add_pass(name)
        else:
            results.add_fail(name, f"Only {hits}/10 in {elapsed:.1f}s")

    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def test_price_range():
    """Sanity-check that returned prices fall in a plausible EU range."""
    name = "Price Range"
    if not HAS_V3:
        results.add_fail(name, "v3 not installed")
        return

    try:
        when = (date.today() + timedelta(days=30)).strftime('%Y-%m-%d')
        flights = await search_direct_flights("BER", "BRI", when)

        # Guard clauses flatten the original nested if/else pyramid;
        # every failure reason is preserved verbatim.
        if not flights:
            results.add_fail(name, "No flights to check prices")
            return

        prices = [f['price'] for f in flights if 'price' in f]
        if not prices:
            results.add_fail(name, "No prices found in results")
            return

        lo, hi = min(prices), max(prices)

        # Sanity check: prices should be between 20 and 1000 EUR for EU routes
        if 20 <= lo <= 1000 and 20 <= hi <= 1000:
            logger.info(f"  Price range: €{lo} - €{hi}")
            results.add_pass(name)
        else:
            results.add_fail(name, f"Unreasonable prices: €{lo} - €{hi}")

    except Exception as e:
        results.add_fail(name, str(e))
|
||||
|
||||
|
||||
async def run_all_tests():
    """Run the full suite in order: unit → integration → stress → validation.

    Returns:
        True when every recorded test passed, False otherwise (including
        when fast-flights v3 is not installed at all).

    A 2-second pause between live queries is deliberate, informal rate
    limiting against Google Flights.
    """
    # Fix: the title row used fixed padding (15 + 47-char title + 14 = 76)
    # inside a 78-char border, so the box's right edge was misaligned.
    # .center(78) keeps it aligned regardless of the title's length.
    logger.info("╔" + "="*78 + "╗")
    logger.info("║" + "COMPREHENSIVE TEST SUITE - fast-flights v3.0rc1".center(78) + "║")
    logger.info("╚" + "="*78 + "╝\n")

    if not HAS_V3:
        logger.error("fast-flights v3.0rc1 not installed!")
        logger.error("Install with: pip install --upgrade git+https://github.com/AWeirdDev/flights.git")
        return False

    # Unit tests
    logger.info("\n" + "-"*80)
    logger.info("UNIT TESTS")
    logger.info("-"*80)
    test_socs_integration()

    # Integration tests
    logger.info("\n" + "-"*80)
    logger.info("INTEGRATION TESTS")
    logger.info("-"*80)

    await test_single_route_ber_bri()
    await asyncio.sleep(2)  # Rate limiting

    await test_multiple_routes()
    await asyncio.sleep(2)

    await test_different_dates()
    await asyncio.sleep(2)

    await test_no_direct_flights()
    await asyncio.sleep(2)

    await test_invalid_airport_code()
    await asyncio.sleep(2)

    # Stress tests
    logger.info("\n" + "-"*80)
    logger.info("STRESS TESTS")
    logger.info("-"*80)

    await test_concurrent_requests()
    await asyncio.sleep(2)

    # Validation tests
    logger.info("\n" + "-"*80)
    logger.info("VALIDATION TESTS")
    logger.info("-"*80)

    await test_price_range()

    # Summary
    return results.summary()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the suite and translate the boolean result into an exit code.
    ok = asyncio.run(run_all_tests())

    logger.info("\n" + "="*80)
    verdict = "✅ ALL TESTS PASSED!" if ok else "⚠️  SOME TESTS FAILED - See summary above"
    logger.info(verdict)
    logger.info("="*80)

    sys.exit(0 if ok else 1)
|
||||
64
flight-comparator/tests/test_date_resolver.py
Normal file
64
flight-comparator/tests/test_date_resolver.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""
|
||||
Smoke tests for date_resolver module.
|
||||
"""
|
||||
|
||||
from datetime import date
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import sys
|
||||
sys.path.insert(0, '..')
|
||||
|
||||
from date_resolver import resolve_dates, detect_new_connections, SEARCH_WINDOW_MONTHS
|
||||
|
||||
|
||||
def test_resolve_dates_with_specific_date():
    """A concrete date must short-circuit to a single-element list."""
    dates = resolve_dates("2026-06-15", 6)
    assert dates == ["2026-06-15"]
    print("✓ Specific date resolution works")
|
||||
|
||||
|
||||
def test_resolve_dates_seasonal():
    """Seasonal mode (no explicit date) yields one date per month."""
    dates = resolve_dates(None, 3)
    assert len(dates) == 3
    for entry in dates:
        # Each entry must look like an ISO date: YYYY-MM-DD.
        assert len(entry) == 10
        assert entry.count('-') == 2
    print(f"✓ Seasonal resolution works: {dates}")
|
||||
|
||||
|
||||
def test_detect_new_connections():
    """Connections absent in the first month are flagged with their debut month."""
    monthly_results = {
        "2026-03": [
            {"origin": "FRA", "destination": "JFK"},
            {"origin": "MUC", "destination": "JFK"},
        ],
        "2026-04": [
            {"origin": "FRA", "destination": "JFK"},
            {"origin": "MUC", "destination": "JFK"},
            {"origin": "BER", "destination": "JFK"},  # NEW
        ],
        "2026-05": [
            {"origin": "FRA", "destination": "JFK"},
            {"origin": "BER", "destination": "JFK"},
            {"origin": "HAM", "destination": "JFK"},  # NEW
        ],
    }

    new = detect_new_connections(monthly_results)

    # Routes first seen after the opening month, keyed by debut month.
    assert new.get("BER->JFK") == "2026-04"
    assert new.get("HAM->JFK") == "2026-05"
    # Baseline routes present from the first month are never "new".
    for baseline in ("FRA->JFK", "MUC->JFK"):
        assert baseline not in new
    print(f"✓ New connection detection works: {new}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running these smoke tests directly, without pytest.
    for smoke_test in (
        test_resolve_dates_with_specific_date,
        test_resolve_dates_seasonal,
        test_detect_new_connections,
    ):
        smoke_test()
    print("\n✅ All date_resolver tests passed!")
|
||||
23
flight-comparator/tests/test_formatter.py
Normal file
23
flight-comparator/tests/test_formatter.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Smoke tests for formatter module.
|
||||
"""
|
||||
|
||||
import sys
|
||||
sys.path.insert(0, '..')
|
||||
|
||||
from formatter import format_duration
|
||||
|
||||
|
||||
def test_format_duration():
    """Minutes render as 'Xh Ym', whole hours as 'Xh', zero as an em dash."""
    expectations = {
        0: "—",
        60: "1h",
        90: "1h 30m",
        570: "9h 30m",
        615: "10h 15m",
    }
    for minutes, rendered in expectations.items():
        assert format_duration(minutes) == rendered
    print("✓ Duration formatting works")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this smoke test directly, without pytest.
    test_format_duration()
    print("\n✅ All formatter tests passed!")
|
||||
309
flight-comparator/tests/test_integration.py
Normal file
309
flight-comparator/tests/test_integration.py
Normal file
@@ -0,0 +1,309 @@
|
||||
"""
|
||||
Integration tests for Flight Radar Web App.
|
||||
|
||||
Tests that verify multiple components working together, including
|
||||
database operations, full workflows, and system behavior.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import sqlite3
|
||||
import time
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
@pytest.mark.integration
@pytest.mark.database
class TestScanWorkflow:
    """Integration tests for complete scan workflow."""

    def test_create_and_retrieve_scan(self, client: TestClient):
        """Test creating a scan and retrieving it."""
        # Create scan
        create_data = {
            "origin": "BDS",
            "country": "DE",
            "start_date": "2026-04-01",
            "end_date": "2026-06-30",
            "adults": 2
        }

        create_response = client.post("/api/v1/scans", json=create_data)
        assert create_response.status_code == 200

        # NOTE(review): here the new scan's id is read from the top level of
        # the response body, while test_auto_normalization reads a nested
        # "scan" object from the same endpoint — confirm the actual shape.
        scan_id = create_response.json()["id"]

        # Retrieve scan
        get_response = client.get(f"/api/v1/scans/{scan_id}")
        assert get_response.status_code == 200

        scan = get_response.json()
        assert scan["id"] == scan_id
        assert scan["origin"] == create_data["origin"]
        assert scan["country"] == create_data["country"]
        # A freshly created scan starts out pending; the background
        # processor advances it to running/completed later.
        assert scan["status"] == "pending"

    def test_scan_appears_in_list(self, client: TestClient):
        """Test that created scan appears in list."""
        # Create scan
        create_response = client.post("/api/v1/scans", json={
            "origin": "MUC",
            "country": "IT"
        })

        scan_id = create_response.json()["id"]

        # List scans
        list_response = client.get("/api/v1/scans")
        scans = list_response.json()["data"]

        # Find our scan
        found = any(scan["id"] == scan_id for scan in scans)
        assert found

    def test_scan_with_routes_workflow(self, client: TestClient, create_test_route):
        """Test creating scan and adding routes."""
        # Create scan
        create_response = client.post("/api/v1/scans", json={
            "origin": "BDS",
            "country": "DE"
        })

        scan_id = create_response.json()["id"]

        # Add routes directly via the fixture (bypasses the live searcher).
        create_test_route(scan_id=scan_id, destination="MUC", min_price=100)
        create_test_route(scan_id=scan_id, destination="FRA", min_price=50)
        create_test_route(scan_id=scan_id, destination="BER", min_price=75)

        # Get routes
        routes_response = client.get(f"/api/v1/scans/{scan_id}/routes")
        assert routes_response.status_code == 200

        routes = routes_response.json()["data"]
        assert len(routes) == 3

        # Check ordering (by price) — the endpoint is expected to return
        # routes cheapest-first.
        prices = [r["min_price"] for r in routes]
        assert prices == sorted(prices)
|
||||
|
||||
|
||||
@pytest.mark.integration
@pytest.mark.database
class TestDatabaseOperations:
    """Integration tests for database operations."""

    def test_foreign_key_constraints(self, client: TestClient, clean_database):
        """Test that foreign key constraints are enforced."""
        # Try to create route for non-existent scan
        conn = sqlite3.connect(clean_database)
        # SQLite ships with FK enforcement OFF; it must be enabled on every
        # new connection or the bad INSERT below would silently succeed.
        conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign keys
        cursor = conn.cursor()

        with pytest.raises(sqlite3.IntegrityError):
            cursor.execute("""
                INSERT INTO routes (scan_id, destination, destination_name,
                                   destination_city, flight_count, airlines)
                VALUES (999, 'MUC', 'Munich', 'Munich', 10, '[]')
            """)
            conn.commit()

        conn.close()

    def test_cascade_delete(self, client: TestClient, create_test_scan, create_test_route, clean_database):
        """Test that deleting scan cascades to routes."""
        # Create scan and routes
        scan_id = create_test_scan()
        create_test_route(scan_id=scan_id, destination="MUC")
        create_test_route(scan_id=scan_id, destination="FRA")

        # Delete scan
        conn = sqlite3.connect(clean_database)
        conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign keys for cascade
        cursor = conn.cursor()

        cursor.execute("DELETE FROM scans WHERE id = ?", (scan_id,))
        conn.commit()

        # Check routes are deleted
        cursor.execute("SELECT COUNT(*) FROM routes WHERE scan_id = ?", (scan_id,))
        count = cursor.fetchone()[0]

        conn.close()

        assert count == 0

    def test_timestamp_triggers(self, client: TestClient, create_test_scan, clean_database):
        """Test that timestamp triggers work."""
        scan_id = create_test_scan()

        # Get original timestamp
        conn = sqlite3.connect(clean_database)
        conn.execute("PRAGMA foreign_keys = ON")  # Enable foreign keys
        cursor = conn.cursor()

        cursor.execute("SELECT updated_at FROM scans WHERE id = ?", (scan_id,))
        original_time = cursor.fetchone()[0]

        # Wait a moment (SQLite CURRENT_TIMESTAMP has 1-second precision)
        time.sleep(1.1)

        # Update scan — the schema's trigger should bump updated_at.
        cursor.execute("UPDATE scans SET status = 'running' WHERE id = ?", (scan_id,))
        conn.commit()

        # Get new timestamp
        cursor.execute("SELECT updated_at FROM scans WHERE id = ?", (scan_id,))
        new_time = cursor.fetchone()[0]

        conn.close()

        assert new_time != original_time
|
||||
|
||||
|
||||
@pytest.mark.integration
class TestPaginationAcrossEndpoints:
    """Integration tests for pagination consistency."""

    def test_pagination_metadata_consistency(self, client: TestClient, create_test_scan):
        """Test pagination metadata is consistent across endpoints."""
        # Seed 10 scans so page 1 of limit 3 leaves further pages behind it.
        for _ in range(10):
            create_test_scan()

        meta = client.get("/api/v1/scans?page=1&limit=3").json()["pagination"]

        assert meta["page"] == 1
        assert meta["limit"] == 3
        assert meta["total"] == 10
        assert meta["pages"] == 4
        assert meta["has_next"] is True
        assert meta["has_prev"] is False

    def test_pagination_last_page(self, client: TestClient, create_test_scan):
        """Test pagination on last page."""
        # 7 scans with limit=5 leaves exactly 2 items on page 2 (the last page).
        for _ in range(7):
            create_test_scan()

        payload = client.get("/api/v1/scans?page=2&limit=5").json()

        assert payload["pagination"]["page"] == 2
        assert payload["pagination"]["has_next"] is False
        assert payload["pagination"]["has_prev"] is True
        assert len(payload["data"]) == 2  # Only 2 items on last page
|
||||
|
||||
|
||||
@pytest.mark.integration
class TestErrorHandlingIntegration:
    """Integration tests for error handling across the system."""

    def test_error_logging(self, client: TestClient):
        """Test that errors are logged."""
        # Provoke a 404 so something error-shaped reaches the log buffer.
        client.get("/api/v1/scans/999")

        # We only verify the logs endpoint responds; exact content may vary.
        log_response = client.get("/api/v1/logs?search=not+found")
        assert log_response.status_code == 200

    def test_request_id_consistency(self, client: TestClient):
        """Test that request ID is consistent in error response and headers."""
        error_response = client.get("/api/v1/scans/999")

        header_id = error_response.headers.get("x-request-id")
        body_id = error_response.json().get("request_id")

        assert header_id == body_id
|
||||
|
||||
|
||||
@pytest.mark.integration
@pytest.mark.slow
class TestRateLimitingIntegration:
    """Integration tests for rate limiting system."""

    def test_rate_limit_per_endpoint(self, client: TestClient):
        """Test that different endpoints have different rate limits."""
        # Airports endpoint (100/min)
        airport_response = client.get("/api/v1/airports?q=MUC")
        airport_limit = int(airport_response.headers["x-ratelimit-limit"])

        # Scans endpoint (10/min)
        scan_response = client.post("/api/v1/scans", json={"origin": "BDS", "country": "DE"})
        scan_limit = int(scan_response.headers["x-ratelimit-limit"])

        # Different limits: the cheap read endpoint allows far more traffic
        # than scan creation, which kicks off background work.
        assert airport_limit > scan_limit
        assert airport_limit == 100
        assert scan_limit == 10

    def test_rate_limit_recovery(self, client: TestClient):
        """Test that rate limit counter is per-IP and independent."""
        # Make some requests to airports
        for i in range(3):
            client.get("/api/v1/airports?q=MUC")

        # Scans endpoint should have independent counter — the airport
        # traffic above must not consume the scan budget.
        response = client.post("/api/v1/scans", json={"origin": "BDS", "country": "DE"})
        remaining = int(response.headers["x-ratelimit-remaining"])

        # Should still have most of scan limit available (10 total, used 1)
        assert remaining >= 8
|
||||
|
||||
|
||||
@pytest.mark.integration
class TestStartupCleanup:
    """Integration tests for startup cleanup behavior."""

    def test_stuck_scans_detection(self, client: TestClient, create_test_scan, clean_database):
        """Test that stuck scans are detected."""
        # A scan left in 'running' simulates a crash mid-scan.
        stuck_id = create_test_scan(status="running")

        # Verify it persisted in the running state.
        conn = sqlite3.connect(clean_database)
        try:
            row = conn.execute(
                "SELECT status FROM scans WHERE id = ?", (stuck_id,)
            ).fetchone()
        finally:
            conn.close()

        assert row[0] == "running"

        # Note: Actual cleanup happens on server restart, tested manually
|
||||
|
||||
|
||||
@pytest.mark.integration
class TestValidationIntegration:
    """Integration tests for validation across the system."""

    def test_validation_consistency(self, client: TestClient):
        """Test that validation is consistent across endpoints."""
        # Invalid IATA code (codes are exactly 3 letters; 422 = Pydantic reject)
        response1 = client.post("/api/v1/scans", json={"origin": "TOOLONG", "country": "DE"})
        assert response1.status_code == 422

        # Invalid date format
        response2 = client.post("/api/v1/scans", json={
            "origin": "BDS",
            "country": "DE",
            "start_date": "01-04-2026"  # Wrong format
        })
        assert response2.status_code == 422

    def test_auto_normalization(self, client: TestClient):
        """Test that IATA codes are auto-normalized to uppercase."""
        response = client.post("/api/v1/scans", json={
            "origin": "bds",  # lowercase
            "country": "de"  # lowercase
        })

        assert response.status_code == 200
        # NOTE(review): this reads a nested "scan" object, while
        # TestScanWorkflow reads "id" from the top level of the same
        # endpoint's response — confirm which shape the API returns.
        scan = response.json()["scan"]

        assert scan["origin"] == "BDS"  # Uppercased
        assert scan["country"] == "DE"  # Uppercased
|
||||
296
flight-comparator/tests/test_scan_pipeline.py
Normal file
296
flight-comparator/tests/test_scan_pipeline.py
Normal file
@@ -0,0 +1,296 @@
|
||||
"""
|
||||
Integration tests for the full scan pipeline: searcher → processor → database.
|
||||
|
||||
Confirmed flight data is stored in confirmed_flights.json (generated 2026-02-25
|
||||
from a live scan of BDS→FMM,DUS across the full Feb 26 – May 27 2026 window).
|
||||
|
||||
Key confirmed routes:
|
||||
BDS → FMM 39 flights Mar–May 2026 Ryanair ~5-6x/week, two daily slots
|
||||
BDS → DUS 11 flights Apr–May 2026 Eurowings Saturdays only, two time slots
|
||||
|
||||
These tests make real network calls to Google Flights via fast-flights.
|
||||
Mark: integration, slow
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
from searcher_v3 import search_multiple_routes
|
||||
from scan_processor import process_scan
|
||||
from database import initialize_database
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Load confirmed flight data from JSON fixture
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Ground-truth flights scraped from a live scan; see the module docstring.
_FIXTURE_PATH = Path(__file__).parent / "confirmed_flights.json"
# Fix: explicit encoding — the default is platform-dependent and the
# fixture may contain non-ASCII characters (airline names, currency).
with open(_FIXTURE_PATH, encoding="utf-8") as _f:
    CONFIRMED = json.load(_f)

# (origin, destination, date, min_expected_flights, description)
# Built from confirmed_dates_for_testing — each entry is a specific (route, date)
# pair that returned ≥1 real flight from the live API.
KNOWN_ROUTES = [
    (
        e["origin"],
        e["destination"],
        e["date"],
        e["min_flights"],
        f"{e['origin']}→{e['destination']} {e['airline']} on {e['date']} (confirmed €{e['price']:.0f})",
    )
    for e in CONFIRMED["confirmed_dates_for_testing"]["entries"]
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.fixture(scope="module")
def tmp_db():
    """Isolated SQLite database for pipeline tests.

    Creates a throwaway DB file, points DATABASE_PATH at it for the
    duration of the module, then restores the environment and deletes
    the file.

    Fix: the original teardown unconditionally popped DATABASE_PATH,
    clobbering any value the surrounding environment had set before the
    fixture ran; the previous value is now saved and restored.
    """
    fd, path = tempfile.mkstemp(suffix=".db")
    os.close(fd)
    previous = os.environ.get("DATABASE_PATH")
    os.environ["DATABASE_PATH"] = path
    initialize_database(db_path=Path(path), verbose=False)
    yield path
    # Restore the pre-fixture environment exactly.
    if previous is None:
        os.environ.pop("DATABASE_PATH", None)
    else:
        os.environ["DATABASE_PATH"] = previous
    try:
        os.unlink(path)
    except OSError:
        pass  # best-effort cleanup; a locked file should not fail the suite
|
||||
|
||||
|
||||
def _insert_scan(db_path, origin, country, start_date, end_date,
|
||||
seat_class="economy", adults=1):
|
||||
"""Insert a pending scan and return its ID."""
|
||||
conn = sqlite3.connect(db_path)
|
||||
conn.execute("PRAGMA foreign_keys = ON")
|
||||
cur = conn.cursor()
|
||||
cur.execute(
|
||||
"""INSERT INTO scans (origin, country, start_date, end_date, status, seat_class, adults)
|
||||
VALUES (?, ?, ?, ?, 'pending', ?, ?)""",
|
||||
(origin, country, start_date, end_date, seat_class, adults),
|
||||
)
|
||||
scan_id = cur.lastrowid
|
||||
conn.commit()
|
||||
conn.close()
|
||||
return scan_id
|
||||
|
||||
|
||||
def _get_scan(db_path, scan_id):
|
||||
conn = sqlite3.connect(db_path)
|
||||
conn.row_factory = sqlite3.Row
|
||||
row = conn.execute("SELECT * FROM scans WHERE id=?", (scan_id,)).fetchone()
|
||||
conn.close()
|
||||
return dict(row) if row else None
|
||||
|
||||
|
||||
def _get_routes(db_path, scan_id):
|
||||
conn = sqlite3.connect(db_path)
|
||||
conn.row_factory = sqlite3.Row
|
||||
rows = conn.execute(
|
||||
"SELECT * FROM routes WHERE scan_id=?", (scan_id,)
|
||||
).fetchall()
|
||||
conn.close()
|
||||
return [dict(r) for r in rows]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Searcher tests — verify live data comes back for confirmed routes
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestSearcherKnownRoutes:
    """
    Directly test search_multiple_routes() against confirmed real routes.
    Each test uses a date/route pair we know has flights from our earlier scans.
    """

    @pytest.mark.integration
    @pytest.mark.slow
    @pytest.mark.parametrize("origin,dest,date,min_flights,desc", KNOWN_ROUTES)
    def test_returns_flights_for_confirmed_route(self, origin, dest, date, min_flights, desc):
        """Searcher returns ≥min_flights for a confirmed live route."""
        # use_cache=False forces a live query; max_workers=1 keeps the
        # request rate polite across the parametrized run.
        results = asyncio.run(
            search_multiple_routes(
                routes=[(origin, dest, date)],
                seat_class="economy",
                adults=1,
                use_cache=False,
                max_workers=1,
            )
        )

        # Results are keyed by the exact (origin, dest, date) tuple queried.
        flights = results.get((origin, dest, date), [])
        assert len(flights) >= min_flights, (
            f"{desc}: expected ≥{min_flights} flight(s) on {origin}→{dest} {date}, "
            f"got {len(flights)}"
        )

    @pytest.mark.integration
    @pytest.mark.slow
    def test_flight_has_required_fields(self):
        """Every returned flight dict has the mandatory fields."""
        # A (route, date) pair confirmed to have Ryanair service.
        origin, dest, date = "BDS", "FMM", "2026-04-05"
        results = asyncio.run(
            search_multiple_routes(
                routes=[(origin, dest, date)],
                seat_class="economy",
                adults=1,
                use_cache=False,
                max_workers=1,
            )
        )
        flights = results.get((origin, dest, date), [])
        assert flights, f"No flights returned for {origin}→{dest} {date}"

        # The downstream scan processor depends on this exact field set.
        required = {"origin", "destination", "airline", "departure_time",
                    "arrival_time", "price", "stops"}
        for flight in flights:
            missing = required - flight.keys()
            assert not missing, f"Flight missing fields: {missing}. Got: {flight}"
            assert flight["stops"] == 0, "Expected direct flight only"
            assert flight["price"] > 0, "Price must be positive"

    @pytest.mark.integration
    @pytest.mark.slow
    def test_no_results_for_unknown_route(self):
        """Routes with no service return an empty list, not an error."""
        # BDS → JFK: no direct flight exists
        results = asyncio.run(
            search_multiple_routes(
                routes=[("BDS", "JFK", "2026-04-05")],
                seat_class="economy",
                adults=1,
                use_cache=False,
                max_workers=1,
            )
        )
        # Should complete without raising; result may be empty or have 0 flights
        assert ("BDS", "JFK", "2026-04-05") in results
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Pipeline tests — scan processor saves flights to the database
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestScanProcessorSavesRoutes:
    """
    Verify that process_scan() persists discovered flights into the routes
    table. These tests guard against the regression in which the dest_info
    lookup silently threw away every result.
    """

    @pytest.mark.integration
    @pytest.mark.slow
    def test_airports_mode_saves_routes(self, tmp_db):
        """
        Airports mode (comma-separated in country field) must save routes.

        Regression: after removing get_airport_data() call, destinations=[]
        caused dest_info to always be None → all routes silently skipped.
        """
        sid = _insert_scan(
            tmp_db,
            origin="BDS",
            country="FMM",  # destinations-mode format holding a single airport
            start_date="2026-04-05",
            end_date="2026-04-06",
        )
        asyncio.run(process_scan(sid))

        scan_row = _get_scan(tmp_db, sid)
        assert scan_row["status"] == "completed", (
            f"Scan failed: {scan_row.get('error_message')}"
        )

        saved = _get_routes(tmp_db, sid)
        assert len(saved) >= 1, (
            "No routes saved for BDS→FMM even though Ryanair flies this route"
        )
        fmm = next(r for r in saved if r["destination"] == "FMM")
        assert fmm["flight_count"] >= 1
        assert fmm["min_price"] > 0

    @pytest.mark.integration
    @pytest.mark.slow
    def test_airports_mode_unknown_airport_uses_iata_fallback(self, tmp_db):
        """
        When an airport code is not in airports_by_country.json, the route
        is still saved with the IATA code as its name (not silently dropped).
        """
        sid = _insert_scan(
            tmp_db,
            origin="BDS",
            country="FMM",
            start_date="2026-04-05",
            end_date="2026-04-06",
        )
        asyncio.run(process_scan(sid))

        # Every persisted route must carry a non-empty name — the bare IATA
        # code at minimum, never empty/None.
        for entry in _get_routes(tmp_db, sid):
            assert entry["destination_name"], (
                f"destination_name is empty for route to {entry['destination']}"
            )

    @pytest.mark.integration
    @pytest.mark.slow
    def test_country_mode_includes_fmm(self, tmp_db):
        """
        Country mode must scan ALL airports, not just the first 20.

        Regression: [:20] alphabetical cut-off excluded FMM (#72 in DE list)
        and STR (#21), which are among the most active BDS routes.
        """
        sid = _insert_scan(
            tmp_db,
            origin="BDS",
            country="DE",
            start_date="2026-04-05",
            end_date="2026-04-06",
        )
        asyncio.run(process_scan(sid))

        scan_row = _get_scan(tmp_db, sid)
        assert scan_row["status"] == "completed", scan_row.get("error_message")

        found = {r["destination"] for r in _get_routes(tmp_db, sid)}
        # FMM and DUS must appear — they have confirmed flights on 2026-04-05
        assert "FMM" in found, (
            f"FMM (Ryanair BDS→FMM) missing from results. Found: {found}"
        )

    @pytest.mark.integration
    @pytest.mark.slow
    def test_multi_airport_mode_saves_all_routes(self, tmp_db):
        """
        Comma-separated destinations: all airports with flights must be saved.
        """
        sid = _insert_scan(
            tmp_db,
            origin="BDS",
            country="FMM,DUS",  # two confirmed routes
            start_date="2026-04-04",  # Saturday (DUS) — range extends to Apr 15 (FMM mid-week)
            end_date="2026-04-16",  # captures 2026-04-04 (Sat) AND 2026-04-15 (Wed)
        )
        asyncio.run(process_scan(sid))

        scan_row = _get_scan(tmp_db, sid)
        assert scan_row["status"] == "completed", scan_row.get("error_message")

        found = {r["destination"] for r in _get_routes(tmp_db, sid)}
        assert "FMM" in found, "FMM route not saved"
        assert "DUS" in found, "DUS route not saved (Saturday flight)"
||||
33
flight-comparator/tests/test_searcher.py
Normal file
33
flight-comparator/tests/test_searcher.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""
|
||||
Smoke tests for searcher module.
|
||||
"""
|
||||
|
||||
import sys
|
||||
sys.path.insert(0, '..')
|
||||
|
||||
from searcher import _parse_duration
|
||||
|
||||
|
||||
def test_parse_duration():
    """Duration strings convert to the expected number of minutes."""
    # Table-driven: each sample maps a raw Google-Flights duration string
    # to the minute count the parser must return.
    expected = {
        "9h 30m": 570,
        "9h": 540,
        "90m": 90,
        "10h 15m": 615,
        "": 0,
    }
    for text, minutes in expected.items():
        assert _parse_duration(text) == minutes
    print("✓ Duration parsing works")
||||
|
||||
|
||||
def test_parse_duration_edge_cases():
    """Boundary inputs for the duration parser."""
    # Zero duration, minimal non-zero components, and a full day.
    boundary_cases = [
        ("0h 0m", 0),
        ("1h 1m", 61),
        ("24h", 1440),
    ]
    for text, minutes in boundary_cases:
        assert _parse_duration(text) == minutes
    print("✓ Duration parsing edge cases work")
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running the smoke tests directly, without a pytest invocation.
    for check in (test_parse_duration, test_parse_duration_edge_cases):
        check()
    print("\n✅ All searcher tests passed!")
    print("ℹ️ Note: Full API integration tests require fast-flights and live network")
|
||||
Reference in New Issue
Block a user