scrap/tests/api/test_scrape_endpoints.py

"""
Tests API preview/commit scraping.
"""
from datetime import datetime

from pricewatch.app.api.main import commit_scrape, preview_scrape
from pricewatch.app.api.schemas import ScrapeCommitRequest, ScrapePreviewRequest
from pricewatch.app.core.schema import DebugInfo, DebugStatus, FetchMethod, ProductSnapshot

def test_preview_scrape_returns_snapshot(monkeypatch):
    snapshot = ProductSnapshot(
        source="amazon",
        url="https://example.com",
        title="Produit",
        price=9.99,
        currency="EUR",
        debug=DebugInfo(method=FetchMethod.HTTP, status=DebugStatus.SUCCESS),
    )

    # Stub out the real scraper so the endpoint never touches the network.
    def fake_scrape(url, use_playwright=None, save_db=False):
        return {"success": True, "snapshot": snapshot, "error": None}

    monkeypatch.setattr("pricewatch.app.api.main.scrape_product", fake_scrape)

    response = preview_scrape(ScrapePreviewRequest(url="https://example.com"))

    assert response.success is True
    assert response.snapshot["source"] == "amazon"
    assert response.snapshot["price"] == 9.99
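
# Note: the assertions above read response.snapshot as a plain dict, which
# implies preview_scrape returns the snapshot already serialized. A rough
# sketch of the response shape those assertions assume (hypothetical; the
# real ScrapePreviewResponse lives in pricewatch.app.api.schemas and may
# differ):
#
#     class ScrapePreviewResponse(BaseModel):
#         success: bool
#         snapshot: dict | None = None
#         error: str | None = None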

def test_commit_scrape_persists_snapshot(monkeypatch):
    snapshot = ProductSnapshot(
        source="amazon",
        url="https://example.com",
        title="Produit",
        price=19.99,
        currency="EUR",
        fetched_at=datetime(2026, 1, 15, 10, 0, 0),
        debug=DebugInfo(method=FetchMethod.HTTP, status=DebugStatus.SUCCESS),
    )

    # Replace the real pipeline with a stub that skips the database and
    # reports a fixed product id.
    class FakePipeline:
        def __init__(self, config=None):
            self.config = config

        def process_snapshot(self, snapshot, save_to_db=True):
            return 42

    monkeypatch.setattr("pricewatch.app.api.main.ScrapingPipeline", FakePipeline)

    response = commit_scrape(ScrapeCommitRequest(snapshot=snapshot.model_dump(mode="json")))

    assert response.success is True
    assert response.product_id == 42
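
# An HTTP-level variant of the preview test is sketched below. It is an
# assumption, not part of the original suite: it presumes the endpoints are
# mounted on a FastAPI app exported as `app` from pricewatch.app.api.main
# and that the preview route is POST /scrape/preview; both names are
# hypothetical and should be checked against the real router before use.
#
#     from fastapi.testclient import TestClient
#     from pricewatch.app.api.main import app
#
#     def test_preview_scrape_over_http(monkeypatch):
#         monkeypatch.setattr(
#             "pricewatch.app.api.main.scrape_product",
#             lambda url, use_playwright=None, save_db=False: {
#                 "success": True, "snapshot": None, "error": None
#             },
#         )
#         client = TestClient(app)
#         resp = client.post("/scrape/preview", json={"url": "https://example.com"})
#         assert resp.status_code == 200
#         assert resp.json()["success"] is True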