Gilles Soulier
2026-01-14 21:54:55 +01:00
parent c91c0f1fc9
commit d0b73b9319
140 changed files with 5822 additions and 161 deletions

tests/api/test_auth.py (new file, 56 lines)

@@ -0,0 +1,56 @@
"""
Tests auth API.
"""
from dataclasses import dataclass
import pytest
from fastapi import HTTPException
from pricewatch.app.api.main import require_token
@dataclass
class FakeRedisConfig:
url: str
@dataclass
class FakeDbConfig:
url: str
@dataclass
class FakeAppConfig:
db: FakeDbConfig
redis: FakeRedisConfig
api_token: str
def test_missing_token_returns_401(monkeypatch):
"""Sans token, retourne 401."""
config = FakeAppConfig(
db=FakeDbConfig(url="sqlite:///:memory:"),
redis=FakeRedisConfig(url="redis://localhost:6379/0"),
api_token="secret",
)
monkeypatch.setattr("pricewatch.app.api.main.get_config", lambda: config)
with pytest.raises(HTTPException) as excinfo:
require_token(None)
assert excinfo.value.status_code == 401
def test_bad_token_returns_403(monkeypatch):
"""Token invalide retourne 403."""
config = FakeAppConfig(
db=FakeDbConfig(url="sqlite:///:memory:"),
redis=FakeRedisConfig(url="redis://localhost:6379/0"),
api_token="secret",
)
monkeypatch.setattr("pricewatch.app.api.main.get_config", lambda: config)
with pytest.raises(HTTPException) as excinfo:
require_token("Bearer nope")
assert excinfo.value.status_code == 403
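
For context, a minimal sketch of the require_token dependency these tests exercise, inferred only from the assertions above (401 when the header is missing, 403 when the bearer token does not match config.api_token). It is not part of this commit; the get_config import path and the exact signature are assumptions:

from fastapi import Header, HTTPException

from pricewatch.app.core.config import get_config  # assumed import path


def require_token(authorization: str | None = Header(default=None)) -> None:
    config = get_config()
    # 401: no Authorization header at all.
    if authorization is None:
        raise HTTPException(status_code=401, detail="Missing Authorization header")
    # 403: header present but the bearer token does not match the configured one.
    token = authorization.removeprefix("Bearer ").strip()
    if token != config.api_token:
        raise HTTPException(status_code=403, detail="Invalid API token")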


@@ -0,0 +1,30 @@
"""
Tests API logs backend.
"""
from pricewatch.app.api.main import BACKEND_LOGS, list_backend_logs, preview_scrape
from pricewatch.app.api.schemas import ScrapePreviewRequest
from pricewatch.app.core.schema import DebugInfo, DebugStatus, FetchMethod, ProductSnapshot
def test_backend_logs_capture_preview(monkeypatch):
BACKEND_LOGS.clear()
snapshot = ProductSnapshot(
source="amazon",
url="https://example.com",
title="Produit",
price=9.99,
currency="EUR",
debug=DebugInfo(method=FetchMethod.HTTP, status=DebugStatus.SUCCESS),
)
def fake_scrape(url, use_playwright=None, save_db=False):
return {"success": True, "snapshot": snapshot, "error": None}
monkeypatch.setattr("pricewatch.app.api.main.scrape_product", fake_scrape)
preview_scrape(ScrapePreviewRequest(url="https://example.com"))
logs = list_backend_logs()
assert logs
assert logs[-1].message.startswith("Preview scraping")


@@ -0,0 +1,239 @@
"""
Tests filtres avances et exports API.
"""
from datetime import datetime, timedelta
import json
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pricewatch.app.api.main import (
export_logs,
export_prices,
export_products,
list_logs,
list_prices,
list_products,
)
from pricewatch.app.db.models import Base, PriceHistory, Product, ScrapingLog
def _make_session():
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
return engine, session
def test_list_products_filters_latest_price_and_stock():
engine, session = _make_session()
try:
product_a = Product(
source="amazon",
reference="REF-A",
url="https://example.com/a",
title="A",
category="Test",
currency="EUR",
first_seen_at=datetime(2026, 1, 14, 10, 0, 0),
last_updated_at=datetime(2026, 1, 15, 9, 0, 0),
)
product_b = Product(
source="amazon",
reference="REF-B",
url="https://example.com/b",
title="B",
category="Test",
currency="EUR",
first_seen_at=datetime(2026, 1, 14, 10, 0, 0),
last_updated_at=datetime(2026, 1, 15, 9, 5, 0),
)
session.add_all([product_a, product_b])
session.commit()
history = [
PriceHistory(
product_id=product_a.id,
price=80,
shipping_cost=0,
stock_status="out_of_stock",
fetch_method="http",
fetch_status="success",
fetched_at=datetime(2026, 1, 15, 8, 0, 0),
),
PriceHistory(
product_id=product_a.id,
price=100,
shipping_cost=0,
stock_status="in_stock",
fetch_method="http",
fetch_status="success",
fetched_at=datetime(2026, 1, 15, 9, 0, 0),
),
PriceHistory(
product_id=product_b.id,
price=200,
shipping_cost=10,
stock_status="in_stock",
fetch_method="http",
fetch_status="success",
fetched_at=datetime(2026, 1, 15, 9, 5, 0),
),
]
session.add_all(history)
session.commit()
filtered = list_products(price_min=150, session=session)
assert len(filtered) == 1
assert filtered[0].reference == "REF-B"
filtered_stock = list_products(stock_status="in_stock", session=session)
assert {item.reference for item in filtered_stock} == {"REF-A", "REF-B"}
finally:
session.close()
engine.dispose()
def test_list_prices_filters():
engine, session = _make_session()
try:
product = Product(
source="amazon",
reference="REF-1",
url="https://example.com/1",
title="Produit",
category="Test",
currency="EUR",
first_seen_at=datetime(2026, 1, 14, 10, 0, 0),
last_updated_at=datetime(2026, 1, 14, 11, 0, 0),
)
session.add(product)
session.commit()
history = [
PriceHistory(
product_id=product.id,
price=50,
shipping_cost=0,
stock_status="in_stock",
fetch_method="http",
fetch_status="success",
fetched_at=datetime(2026, 1, 14, 12, 0, 0),
),
PriceHistory(
product_id=product.id,
price=120,
shipping_cost=0,
stock_status="in_stock",
fetch_method="http",
fetch_status="failed",
fetched_at=datetime(2026, 1, 15, 12, 0, 0),
),
]
session.add_all(history)
session.commit()
results = list_prices(
product_id=product.id,
price_min=100,
fetch_status="failed",
session=session,
)
assert len(results) == 1
assert results[0].price == 120
finally:
session.close()
engine.dispose()
def test_list_logs_filters():
engine, session = _make_session()
try:
now = datetime(2026, 1, 15, 10, 0, 0)
logs = [
ScrapingLog(
product_id=None,
url="https://example.com/a",
source="amazon",
reference="REF-A",
fetch_method="http",
fetch_status="success",
fetched_at=now,
),
ScrapingLog(
product_id=None,
url="https://example.com/b",
source="amazon",
reference="REF-B",
fetch_method="http",
fetch_status="failed",
fetched_at=now - timedelta(hours=2),
),
]
session.add_all(logs)
session.commit()
filtered = list_logs(
fetch_status="success",
fetched_after=now - timedelta(hours=1),
session=session,
)
assert len(filtered) == 1
assert filtered[0].reference == "REF-A"
finally:
session.close()
engine.dispose()
def test_exports_csv_and_json():
engine, session = _make_session()
try:
product = Product(
source="amazon",
reference="REF-EXPORT",
url="https://example.com/export",
title="Export",
category="Test",
currency="EUR",
first_seen_at=datetime(2026, 1, 14, 10, 0, 0),
last_updated_at=datetime(2026, 1, 14, 11, 0, 0),
)
session.add(product)
session.commit()
session.add(
PriceHistory(
product_id=product.id,
price=99,
shipping_cost=0,
stock_status="in_stock",
fetch_method="http",
fetch_status="success",
fetched_at=datetime(2026, 1, 14, 12, 0, 0),
)
)
session.add(
ScrapingLog(
product_id=product.id,
url=product.url,
source=product.source,
reference=product.reference,
fetch_method="http",
fetch_status="success",
fetched_at=datetime(2026, 1, 14, 12, 0, 0),
)
)
session.commit()
csv_response = export_products(format="csv", session=session)
assert csv_response.media_type == "text/csv"
assert "products.csv" in csv_response.headers.get("Content-Disposition", "")
assert "REF-EXPORT" in csv_response.body.decode("utf-8")
json_response = export_logs(format="json", session=session)
payload = json.loads(json_response.body.decode("utf-8"))
assert payload[0]["reference"] == "REF-EXPORT"
finally:
session.close()
engine.dispose()
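
The first test's name suggests that price_min and stock_status are evaluated against each product's most recent PriceHistory row. A hedged sketch of one way such a query could be written with SQLAlchemy, using only the Product and PriceHistory models from the test; the function name and keyword arguments are illustrative, not the actual list_products implementation:

from sqlalchemy import func, select
from sqlalchemy.orm import Session

from pricewatch.app.db.models import PriceHistory, Product


def products_filtered_on_latest_price(
    session: Session,
    price_min: float | None = None,
    stock_status: str | None = None,
):
    # Most recent fetch timestamp per product.
    latest = (
        select(
            PriceHistory.product_id,
            func.max(PriceHistory.fetched_at).label("max_fetched_at"),
        )
        .group_by(PriceHistory.product_id)
        .subquery()
    )
    # Join back to PriceHistory to get the row carrying that latest timestamp,
    # then apply the filters to that row only.
    query = (
        select(Product)
        .join(latest, latest.c.product_id == Product.id)
        .join(
            PriceHistory,
            (PriceHistory.product_id == latest.c.product_id)
            & (PriceHistory.fetched_at == latest.c.max_fetched_at),
        )
    )
    if price_min is not None:
        query = query.where(PriceHistory.price >= price_min)
    if stock_status is not None:
        query = query.where(PriceHistory.stock_status == stock_status)
    return session.scalars(query).all()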

tests/api/test_health.py (new file, 40 lines)

@@ -0,0 +1,40 @@
"""
Tests endpoint /health.
"""
from dataclasses import dataclass
from pricewatch.app.api.main import health_check
@dataclass
class FakeRedisConfig:
url: str
@dataclass
class FakeDbConfig:
url: str
@dataclass
class FakeAppConfig:
db: FakeDbConfig
redis: FakeRedisConfig
api_token: str
def test_health_ok(monkeypatch):
"""Health retourne db/redis true."""
config = FakeAppConfig(
db=FakeDbConfig(url="sqlite:///:memory:"),
redis=FakeRedisConfig(url="redis://localhost:6379/0"),
api_token="secret",
)
monkeypatch.setattr("pricewatch.app.api.main.get_config", lambda: config)
monkeypatch.setattr("pricewatch.app.api.main.check_db_connection", lambda cfg: True)
monkeypatch.setattr("pricewatch.app.api.main.check_redis_connection", lambda url: True)
result = health_check()
assert result.db is True
assert result.redis is True
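
A minimal health_check sketch consistent with this test; get_config, check_db_connection and check_redis_connection are the helpers the test monkeypatches on pricewatch.app.api.main, while the HealthResponse schema name and the import paths below are assumptions:

from pydantic import BaseModel

from pricewatch.app.core.config import get_config             # assumed path
from pricewatch.app.db.session import check_db_connection     # assumed path
from pricewatch.app.core.redis import check_redis_connection  # assumed path


class HealthResponse(BaseModel):
    db: bool
    redis: bool


def health_check() -> HealthResponse:
    config = get_config()
    return HealthResponse(
        db=check_db_connection(config.db),               # patched to lambda cfg: True
        redis=check_redis_connection(config.redis.url),  # patched to lambda url: True
    )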


@@ -0,0 +1,47 @@
"""
Tests HTTP d'integration contre l'API Docker.
"""
import os
import pytest
import httpx
API_BASE = os.getenv("PW_API_BASE", "http://localhost:8001")
API_TOKEN = os.getenv("PW_API_TOKEN", "change_me")
def _client() -> httpx.Client:
return httpx.Client(base_url=API_BASE, timeout=2.0)
def _is_api_up() -> bool:
try:
with _client() as client:
resp = client.get("/health")
return resp.status_code == 200
except Exception:
return False
@pytest.mark.skipif(not _is_api_up(), reason="API Docker indisponible")
def test_health_endpoint():
"""/health repond avec db/redis."""
with _client() as client:
resp = client.get("/health")
assert resp.status_code == 200
payload = resp.json()
assert "db" in payload and "redis" in payload
@pytest.mark.skipif(not _is_api_up(), reason="API Docker indisponible")
def test_products_requires_token():
"""/products demande un token valide."""
with _client() as client:
resp = client.get("/products")
assert resp.status_code == 401
resp = client.get("/products", headers={"Authorization": f"Bearer {API_TOKEN}"})
assert resp.status_code == 200
assert isinstance(resp.json(), list)


@@ -0,0 +1,37 @@
"""
Tests API produits en lecture seule.
"""
from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pricewatch.app.api.main import list_products
from pricewatch.app.db.models import Base, Product
def test_list_products():
"""Liste des produits."""
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
product = Product(
source="amazon",
reference="REF1",
url="https://example.com",
title="Produit",
category="Test",
currency="EUR",
first_seen_at=datetime(2026, 1, 14, 16, 0, 0),
last_updated_at=datetime(2026, 1, 14, 16, 0, 0),
)
session.add(product)
session.commit()
data = list_products(session=session, limit=50, offset=0)
assert len(data) == 1
assert data[0].reference == "REF1"
session.close()
engine.dispose()


@@ -0,0 +1,55 @@
"""
Tests API preview/commit scraping.
"""
from datetime import datetime
from pricewatch.app.api.main import commit_scrape, preview_scrape
from pricewatch.app.api.schemas import ScrapeCommitRequest, ScrapePreviewRequest
from pricewatch.app.core.schema import DebugInfo, DebugStatus, FetchMethod, ProductSnapshot
def test_preview_scrape_returns_snapshot(monkeypatch):
snapshot = ProductSnapshot(
source="amazon",
url="https://example.com",
title="Produit",
price=9.99,
currency="EUR",
debug=DebugInfo(method=FetchMethod.HTTP, status=DebugStatus.SUCCESS),
)
def fake_scrape(url, use_playwright=None, save_db=False):
return {"success": True, "snapshot": snapshot, "error": None}
monkeypatch.setattr("pricewatch.app.api.main.scrape_product", fake_scrape)
response = preview_scrape(ScrapePreviewRequest(url="https://example.com"))
assert response.success is True
assert response.snapshot["source"] == "amazon"
assert response.snapshot["price"] == 9.99
def test_commit_scrape_persists_snapshot(monkeypatch):
snapshot = ProductSnapshot(
source="amazon",
url="https://example.com",
title="Produit",
price=19.99,
currency="EUR",
fetched_at=datetime(2026, 1, 15, 10, 0, 0),
debug=DebugInfo(method=FetchMethod.HTTP, status=DebugStatus.SUCCESS),
)
class FakePipeline:
def __init__(self, config=None):
self.config = config
def process_snapshot(self, snapshot, save_to_db=True):
return 42
monkeypatch.setattr("pricewatch.app.api.main.ScrapingPipeline", FakePipeline)
response = commit_scrape(ScrapeCommitRequest(snapshot=snapshot.model_dump(mode="json")))
assert response.success is True
assert response.product_id == 42
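
A hedged sketch of the commit_scrape path this test covers: re-validate the previewed snapshot, hand it to ScrapingPipeline.process_snapshot, and return the persisted product id. The ScrapeCommitResponse schema name and the pipeline import path are assumptions, not part of this commit:

from pricewatch.app.api.schemas import ScrapeCommitRequest, ScrapeCommitResponse  # response name assumed
from pricewatch.app.core.schema import ProductSnapshot
from pricewatch.app.pipeline import ScrapingPipeline  # assumed import path


def commit_scrape(payload: ScrapeCommitRequest) -> ScrapeCommitResponse:
    # Re-validate the snapshot dict the client got back from the preview step.
    snapshot = ProductSnapshot.model_validate(payload.snapshot)
    # The test's FakePipeline returns 42 here; the real pipeline would persist
    # the snapshot and return the product id.
    product_id = ScrapingPipeline().process_snapshot(snapshot, save_to_db=True)
    return ScrapeCommitResponse(success=True, product_id=product_id)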


@@ -0,0 +1,16 @@
"""
Tests API logs Uvicorn.
"""
from pricewatch.app.api.main import list_uvicorn_logs
def test_list_uvicorn_logs_reads_file(monkeypatch, tmp_path):
log_file = tmp_path / "uvicorn.log"
log_file.write_text("ligne-1\nligne-2\n", encoding="utf-8")
monkeypatch.setattr("pricewatch.app.api.main.UVICORN_LOG_PATH", log_file)
response = list_uvicorn_logs(limit=1)
assert len(response) == 1
assert response[0].line == "ligne-2"
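
A minimal list_uvicorn_logs sketch matching this test (return the last `limit` lines of UVICORN_LOG_PATH, newest last); the UvicornLogEntry model name and the default path are assumptions:

from pathlib import Path

from pydantic import BaseModel

UVICORN_LOG_PATH = Path("logs/uvicorn.log")  # assumed default, patched in the test


class UvicornLogEntry(BaseModel):
    line: str


def list_uvicorn_logs(limit: int = 100) -> list[UvicornLogEntry]:
    if not UVICORN_LOG_PATH.exists():
        return []
    lines = UVICORN_LOG_PATH.read_text(encoding="utf-8").splitlines()
    # Keep only the tail; with limit=1 the test gets ["ligne-2"].
    return [UvicornLogEntry(line=line) for line in lines[-limit:]]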

tests/api/test_version.py (new file, 11 lines)

@@ -0,0 +1,11 @@
"""
Tests API version.
"""
from pricewatch.app.api.main import version_info
def test_version_info():
"""Retourne la version API."""
response = version_info()
assert response.api_version


@@ -0,0 +1,72 @@
"""
Tests API webhooks.
"""
import pytest
from fastapi import HTTPException
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pricewatch.app.api.main import (
create_webhook,
delete_webhook,
list_webhooks,
send_webhook_test,
update_webhook,
)
from pricewatch.app.api.schemas import WebhookCreate, WebhookUpdate
from pricewatch.app.db.models import Base
def _make_session():
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
return engine, session
def test_webhook_crud_and_test(monkeypatch):
engine, session = _make_session()
try:
payload = WebhookCreate(event="price_changed", url="https://example.com/webhook")
created = create_webhook(payload, session=session)
assert created.id > 0
items = list_webhooks(session=session)
assert len(items) == 1
updated = update_webhook(created.id, WebhookUpdate(enabled=False), session=session)
assert updated.enabled is False
with pytest.raises(HTTPException) as excinfo:
send_webhook_test(created.id, session=session)
assert excinfo.value.status_code == 409
update_webhook(created.id, WebhookUpdate(enabled=True), session=session)
called = {}
def fake_post(url, json, headers, timeout):
called["url"] = url
called["json"] = json
called["headers"] = headers
called["timeout"] = timeout
class FakeResponse:
status_code = 200
def raise_for_status(self):
return None
return FakeResponse()
monkeypatch.setattr("pricewatch.app.api.main.httpx.post", fake_post)
response = send_webhook_test(created.id, session=session)
assert response.status == "sent"
assert called["json"]["event"] == "test"
delete_webhook(created.id, session=session)
assert list_webhooks(session=session) == []
finally:
session.close()
engine.dispose()
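
A send_webhook_test sketch consistent with the flow above (409 while the webhook is disabled, then an httpx.post carrying an {"event": "test"} payload and a "sent" status once re-enabled); the Webhook model and WebhookTestResponse schema names are assumptions:

import httpx
from fastapi import HTTPException
from pydantic import BaseModel
from sqlalchemy.orm import Session

from pricewatch.app.db.models import Webhook  # assumed model name


class WebhookTestResponse(BaseModel):  # assumed schema name
    status: str


def send_webhook_test(webhook_id: int, session: Session) -> WebhookTestResponse:
    webhook = session.get(Webhook, webhook_id)
    if webhook is None:
        raise HTTPException(status_code=404, detail="Webhook not found")
    if not webhook.enabled:
        # The test asserts this 409 while enabled is False.
        raise HTTPException(status_code=409, detail="Webhook is disabled")
    resp = httpx.post(
        webhook.url,
        json={"event": "test", "webhook_id": webhook.id},
        headers={"Content-Type": "application/json"},
        timeout=5.0,
    )
    resp.raise_for_status()
    return WebhookTestResponse(status="sent")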