- Add SPA support for Playwright with wait_for_network_idle and extra_wait_ms - Add BaseStore.get_spa_config() and requires_playwright() methods - Implement AliExpress SPA config with JSON price extraction patterns - Fix Amazon price parsing to prioritize whole+fraction combination - Fix AliExpress regex patterns (remove double backslashes) - Add CLI tests: detect, doctor, fetch, parse, run commands - Add API tests: auth, logs, products, scraping_logs, webhooks Tests: 417 passed, 85% coverage Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
136 lines
4.7 KiB
Python
136 lines
4.7 KiB
Python
"""Tests API endpoints scraping logs."""
|
|
|
|
from datetime import datetime
|
|
from unittest.mock import MagicMock
|
|
|
|
import pytest
|
|
from fastapi import HTTPException
|
|
from sqlalchemy.exc import SQLAlchemyError
|
|
|
|
from pricewatch.app.api.main import create_log, update_log, delete_log
|
|
from pricewatch.app.api.schemas import ScrapingLogCreate, ScrapingLogUpdate
|
|
|
|
|
|
class MockScrapingLog:
    """Lightweight stand-in for the ScrapingLog ORM model.

    Accepts arbitrary keyword overrides; any field not supplied falls
    back to a representative default so tests only spell out what they
    care about.
    """

    def __init__(self, **kwargs):
        defaults = {
            "id": 1,
            "product_id": None,
            "url": "https://example.com",
            "source": "amazon",
            "reference": "REF123",
            "fetch_method": "http",
            "fetch_status": "success",
            # Computed per-instance so each mock gets a fresh timestamp.
            "fetched_at": datetime.now(),
            "duration_ms": 1500,
            "html_size_bytes": 50000,
            "errors": [],
            "notes": [],
        }
        for field, fallback in defaults.items():
            setattr(self, field, kwargs.get(field, fallback))
|
|
|
|
|
|
class TestCreateLog:
    """Tests for the create_log endpoint."""

    def test_create_log_db_error(self):
        """A database failure while creating a log raises HTTP 500."""
        from unittest.mock import patch

        session = MagicMock()
        session.add = MagicMock()
        # Make the commit blow up so the endpoint's error path runs.
        session.commit = MagicMock(side_effect=SQLAlchemyError("error"))
        session.rollback = MagicMock()

        payload = ScrapingLogCreate(
            url="https://amazon.fr/dp/TEST",
            source="amazon",
            reference="TEST123",
            fetch_method="http",
            fetch_status="success",
            fetched_at=datetime.now(),
        )

        # The ORM model is patched out: only the session behavior matters here.
        with patch("pricewatch.app.api.main.ScrapingLog"), pytest.raises(
            HTTPException
        ) as exc_info:
            create_log(payload, session)
        assert exc_info.value.status_code == 500
|
|
|
|
|
|
class TestUpdateLog:
    """Tests for the update_log endpoint."""

    def test_update_log_not_found(self):
        """Updating a nonexistent log raises HTTP 404."""
        session = MagicMock()
        query = MagicMock()
        # Simulate an empty lookup: no row matches the requested id.
        query.filter.return_value.one_or_none.return_value = None
        session.query.return_value = query

        payload = ScrapingLogUpdate(fetch_status="failed")

        with pytest.raises(HTTPException) as exc_info:
            update_log(99999, payload, session)
        assert exc_info.value.status_code == 404

    def test_update_log_db_error(self):
        """A database failure while updating a log raises HTTP 500."""
        from unittest.mock import patch

        log_row = MockScrapingLog()
        session = MagicMock()
        query = MagicMock()
        query.filter.return_value.one_or_none.return_value = log_row
        session.query.return_value = query
        # The row is found, but committing the change fails.
        session.commit = MagicMock(side_effect=SQLAlchemyError("error"))
        session.rollback = MagicMock()

        payload = ScrapingLogUpdate(fetch_status="failed")

        with patch("pricewatch.app.api.main._log_to_out"), pytest.raises(
            HTTPException
        ) as exc_info:
            update_log(1, payload, session)
        assert exc_info.value.status_code == 500
|
|
|
|
|
|
class TestDeleteLog:
    """Tests for the delete_log endpoint."""

    def test_delete_log_not_found(self):
        """Deleting a nonexistent log raises HTTP 404."""
        session = MagicMock()
        query = MagicMock()
        # No row matches the requested id.
        query.filter.return_value.one_or_none.return_value = None
        session.query.return_value = query

        with pytest.raises(HTTPException) as exc_info:
            delete_log(99999, session)
        assert exc_info.value.status_code == 404

    def test_delete_log_success(self):
        """A successful delete returns the confirmation payload."""
        session = MagicMock()
        query = MagicMock()
        query.filter.return_value.one_or_none.return_value = MockScrapingLog()
        session.query.return_value = query
        session.delete = MagicMock()
        session.commit = MagicMock()

        result = delete_log(1, session)

        assert result == {"status": "deleted"}
        session.delete.assert_called_once()

    def test_delete_log_db_error(self):
        """A database failure while deleting a log raises HTTP 500."""
        session = MagicMock()
        query = MagicMock()
        query.filter.return_value.one_or_none.return_value = MockScrapingLog()
        session.query.return_value = query
        session.delete = MagicMock()
        # The row is found and deleted, but the commit fails.
        session.commit = MagicMock(side_effect=SQLAlchemyError("error"))
        session.rollback = MagicMock()

        with pytest.raises(HTTPException) as exc_info:
            delete_log(1, session)
        assert exc_info.value.status_code == 500