This commit is contained in:
2026-01-25 14:48:26 +01:00
parent 5c3e6b84a4
commit c56a4632a2
958 changed files with 1149102 additions and 123 deletions

7
.env Normal file
View File

@@ -0,0 +1,7 @@
APP_ENV=development
API_HOST=0.0.0.0
API_PORT=8018
FRONTEND_PORT=8081
LOG_LEVEL=INFO
DATABASE_URL=sqlite:///backend/data/suivi.db
VITE_API_URL=/api

4
.gitignore vendored
View File

@@ -1,8 +1,8 @@
.venv/
__pycache__/
*.pyc
backend/data/
#backend/data/
backend/logs/
frontend/node_modules/
frontend/dist/
.env
#.env

View File

@@ -1,16 +1,34 @@
from __future__ import annotations
import json
import os
import shutil
from datetime import datetime
from pathlib import Path
from fastapi import APIRouter, Body, HTTPException
from fastapi import APIRouter, Body, HTTPException, UploadFile, File
from fastapi.responses import FileResponse
from backend.app.core.config import BackendConfig, CONFIG_PATH, load_config
from backend.app.db.database import DEFAULT_DATABASE_PATH
router = APIRouter(prefix="/config", tags=["config"])
# Chemin vers la config frontend
FRONTEND_CONFIG_PATH = Path(__file__).resolve().parent.parent.parent.parent / "frontend" / "config_frontend.json"
# Chemins possibles vers la config frontend (dev + docker)
FRONTEND_CONFIG_PATH = (
Path(__file__).resolve().parent.parent.parent.parent / "frontend" / "public" / "config_frontend.json"
)
FRONTEND_CONFIG_FALLBACK_PATH = (
Path(__file__).resolve().parent.parent.parent.parent / "backend" / "config_frontend.json"
)
def _get_frontend_config_path() -> Path | None:
    """Return the first frontend-config path that exists on disk, else None.

    Checks the dev location first, then the Docker fallback location.
    """
    for candidate in (FRONTEND_CONFIG_PATH, FRONTEND_CONFIG_FALLBACK_PATH):
        if candidate.exists():
            return candidate
    return None
@router.get("/backend", response_model=BackendConfig)
@@ -23,8 +41,21 @@ def read_backend_config() -> BackendConfig:
def update_backend_config(payload: dict = Body(...)) -> BackendConfig:
current = load_config()
try:
# validation via Pydantic avant écriture
updated = current.model_copy(update=payload)
# Fusion profonde des configs (nécessaire pour les modèles imbriqués Pydantic v2)
def deep_merge(base: dict, update: dict) -> dict:
result = base.copy()
for key, value in update.items():
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
result[key] = deep_merge(result[key], value)
else:
result[key] = value
return result
# Convertir en dict, fusionner, puis revalider
current_dict = current.model_dump()
merged = deep_merge(current_dict, payload)
updated = BackendConfig.model_validate(merged)
CONFIG_PATH.write_text(updated.model_dump_json(indent=2), encoding="utf-8")
load_config.cache_clear()
return load_config()
@@ -35,9 +66,10 @@ def update_backend_config(payload: dict = Body(...)) -> BackendConfig:
@router.get("/frontend")
def read_frontend_config() -> dict:
"""Retourne la configuration frontend."""
if not FRONTEND_CONFIG_PATH.exists():
config_path = _get_frontend_config_path()
if not config_path:
raise HTTPException(status_code=404, detail="Config frontend introuvable")
return json.loads(FRONTEND_CONFIG_PATH.read_text(encoding="utf-8"))
return json.loads(config_path.read_text(encoding="utf-8"))
@router.put("/frontend")
@@ -46,8 +78,9 @@ def update_frontend_config(payload: dict = Body(...)) -> dict:
try:
# Charger la config actuelle
current = {}
if FRONTEND_CONFIG_PATH.exists():
current = json.loads(FRONTEND_CONFIG_PATH.read_text(encoding="utf-8"))
config_path = _get_frontend_config_path()
if config_path and config_path.exists():
current = json.loads(config_path.read_text(encoding="utf-8"))
# Fusion profonde des configs
def deep_merge(base: dict, update: dict) -> dict:
@@ -60,19 +93,87 @@ def update_frontend_config(payload: dict = Body(...)) -> dict:
return result
updated = deep_merge(current, payload)
FRONTEND_CONFIG_PATH.write_text(
json.dumps(updated, indent=2, ensure_ascii=False),
encoding="utf-8"
)
target_paths = []
if FRONTEND_CONFIG_PATH.parent.exists():
target_paths.append(FRONTEND_CONFIG_PATH)
if FRONTEND_CONFIG_FALLBACK_PATH.parent.exists():
target_paths.append(FRONTEND_CONFIG_FALLBACK_PATH)
if not target_paths:
target_paths.append(FRONTEND_CONFIG_FALLBACK_PATH)
# Mettre à jour aussi dans public/ pour le frontend dev
public_config = FRONTEND_CONFIG_PATH.parent / "public" / "config_frontend.json"
if public_config.parent.exists():
public_config.write_text(
for target in target_paths:
target.write_text(
json.dumps(updated, indent=2, ensure_ascii=False),
encoding="utf-8"
encoding="utf-8",
)
return updated
except Exception as exc:
raise HTTPException(status_code=400, detail=str(exc))
# ==================== Database Backup ====================
@router.get("/database/info")
def database_info() -> dict:
    """Return metadata about the SQLite database file.

    Raises 404 when the database file does not exist on disk.
    """
    db_path = DEFAULT_DATABASE_PATH
    if not db_path.exists():
        raise HTTPException(status_code=404, detail="Base de données introuvable")
    file_stat = db_path.stat()
    size_bytes = file_stat.st_size
    return {
        "path": str(db_path),
        "filename": db_path.name,
        "size_bytes": size_bytes,
        "size_mb": round(size_bytes / (1024 * 1024), 2),
        "modified_at": datetime.fromtimestamp(file_stat.st_mtime).isoformat(),
    }
@router.get("/database/backup")
def download_database():
    """Stream a copy of the SQLite database as a downloadable backup.

    A temporary on-disk copy is made first so the download is not affected
    by locks on the live database file; the copy is removed once the
    response has been sent.

    Raises 404 when the database file does not exist.
    """
    if not DEFAULT_DATABASE_PATH.exists():
        raise HTTPException(status_code=404, detail="Base de données introuvable")
    # Single timestamp so the on-disk copy and the download filename agree
    # (the original called datetime.now() twice, which could differ).
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_path = DEFAULT_DATABASE_PATH.parent / f"backup_{timestamp}.db"
    shutil.copy2(DEFAULT_DATABASE_PATH, backup_path)
    # FastAPI bundles Starlette; local import keeps module-level imports intact.
    from starlette.background import BackgroundTask
    return FileResponse(
        path=backup_path,
        filename=f"suivi_produit_backup_{timestamp}.db",
        media_type="application/x-sqlite3",
        # BUG FIX: background=None never cleaned up the temporary copy,
        # leaking one backup_*.db file per download. Delete it after send.
        background=BackgroundTask(backup_path.unlink, missing_ok=True),
    )
@router.post("/database/restore")
async def restore_database(file: UploadFile = File(...)) -> dict:
    """Restore the database from an uploaded ``.db`` file.

    Validates the upload (extension, size, SQLite magic header), keeps a
    safety copy of the current database, then overwrites it with the
    uploaded content. Rolls back to the safety copy if the write fails.

    Raises 400 for invalid uploads and 500 when the write fails.
    """
    # file.filename may be None on malformed multipart requests; the
    # original would raise AttributeError (HTTP 500) in that case.
    if not file.filename or not file.filename.endswith(".db"):
        raise HTTPException(status_code=400, detail="Le fichier doit être un .db")
    content = await file.read()
    # Size cap to avoid exhausting disk/memory (100 MB).
    if len(content) > 100 * 1024 * 1024:
        raise HTTPException(status_code=400, detail="Fichier trop volumineux (max 100MB)")
    # This endpoint overwrites the live database with untrusted input:
    # reject anything that is not actually a SQLite file (magic header).
    if not content.startswith(b"SQLite format 3\x00"):
        raise HTTPException(status_code=400, detail="Le fichier n'est pas une base SQLite valide")
    # Safety backup of the current database before overwriting it.
    backup_before = None
    if DEFAULT_DATABASE_PATH.exists():
        backup_before = DEFAULT_DATABASE_PATH.parent / f"before_restore_{datetime.now().strftime('%Y%m%d_%H%M%S')}.db"
        shutil.copy2(DEFAULT_DATABASE_PATH, backup_before)
    try:
        with open(DEFAULT_DATABASE_PATH, "wb") as f:
            f.write(content)
        return {
            "success": True,
            "message": "Base de données restaurée avec succès",
            "size_bytes": len(content),
        }
    except Exception as exc:
        # Best-effort rollback if the overwrite failed part-way through.
        if backup_before is not None and backup_before.exists():
            shutil.copy2(backup_before, DEFAULT_DATABASE_PATH)
        raise HTTPException(status_code=500, detail=f"Erreur lors de la restauration: {exc}")

View File

@@ -1,6 +1,6 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, Response, status
from sqlalchemy.orm import Session
from backend.app.api.deps import get_db
@@ -46,22 +46,25 @@ def update_product(product_id: int, payload: schemas.ProductUpdate, db: Session
return crud.update_product(db, product, payload)
@router.delete("/{product_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_product(product_id: int, db: Session = Depends(get_db)) -> None:
@router.delete("/{product_id}", status_code=status.HTTP_204_NO_CONTENT, response_model=None)
def delete_product(product_id: int, db: Session = Depends(get_db)) -> Response:
product = crud.get_product(db, product_id)
if not product:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Produit introuvable")
# suppression définitive en base
crud.remove_product(db, product)
return Response(status_code=status.HTTP_204_NO_CONTENT)
@router.get("/{product_id}/snapshots", response_model=list[schemas.ProductSnapshotRead])
def list_snapshots(
product_id: int,
limit: int = 30,
days: int | None = None,
limit: int = 1000,
db: Session = Depends(get_db),
) -> list[schemas.ProductSnapshotRead]:
"""Retourne les snapshots d'un produit, filtrés par nombre de jours."""
product = crud.get_product(db, product_id)
if not product:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Produit introuvable")
return crud.list_snapshots(db, product_id, limit=limit)
return crud.list_snapshots(db, product_id, days=days, limit=limit)

View File

@@ -3,6 +3,7 @@ from __future__ import annotations
from fastapi import APIRouter, BackgroundTasks, HTTPException
from pydantic import BaseModel, HttpUrl
from backend.app.core.scheduler import get_scheduler_status, trigger_next_run
from backend.app.scraper.runner import scrape_all, scrape_preview, scrape_product
router = APIRouter(prefix="/scrape", tags=["scrape"])
@@ -35,3 +36,15 @@ def trigger_single(product_id: int, background_tasks: BackgroundTasks):
def trigger_all(background_tasks: BackgroundTasks):
background_tasks.add_task(scrape_all)
return {"statut": "planifie_tout"}
@router.get("/scheduler/status")
def scheduler_status():
"""Retourne l'état actuel du scheduler de scraping automatique."""
return get_scheduler_status()
@router.post("/scheduler/trigger")
def scheduler_trigger():
"""Force le prochain scrape planifié à s'exécuter maintenant."""
return trigger_next_run()

View File

@@ -0,0 +1,66 @@
"""Endpoint pour les statistiques système."""
from __future__ import annotations
import os
from pathlib import Path
import psutil
from fastapi import APIRouter
from pydantic import BaseModel
router = APIRouter(prefix="/stats", tags=["stats"])
class SystemStats(BaseModel):
    """System resource statistics returned by the stats endpoint."""
    cpu_percent: float      # CPU usage of the backend process, in percent
    memory_mb: float        # resident (RSS) memory of the process, in MiB
    memory_percent: float   # process RSS as a share of total system memory
    data_size_mb: float     # combined size of the data/ and logs/ dirs, MiB
def get_directory_size(path: Path) -> int:
    """Return the total size in bytes of all regular files under *path*.

    Returns 0 when *path* is missing or not a directory. Entries that
    cannot be inspected (permission errors, broken symlinks) are skipped.
    """
    if not (path.exists() and path.is_dir()):
        return 0
    total = 0
    for entry in path.rglob("*"):
        try:
            # BUG FIX: is_file() moved inside the try — before Python 3.13
            # it propagates OSError (e.g. permission denied, symlink loops),
            # which previously escaped the except clause.
            if entry.is_file():
                total += entry.stat().st_size
        except (OSError, PermissionError):
            continue
    return total
@router.get("", response_model=SystemStats)
def get_stats() -> SystemStats:
    """Return CPU/memory usage of the backend process and on-disk data size."""
    proc = psutil.Process(os.getpid())
    cpu = proc.cpu_percent(interval=0.1)  # short sampling window
    rss_bytes = proc.memory_info().rss
    # Express process RSS as a percentage of total system memory.
    rss_share = (rss_bytes / psutil.virtual_memory().total) * 100
    # Docker layout first, local checkout as fallback.
    root = Path("/app/backend")
    if not root.exists():
        root = Path(__file__).parent.parent.parent
    disk_bytes = get_directory_size(root / "data") + get_directory_size(root / "logs")
    return SystemStats(
        cpu_percent=round(cpu, 1),
        memory_mb=round(rss_bytes / (1024 * 1024), 1),
        memory_percent=round(rss_share, 1),
        data_size_mb=round(disk_bytes / (1024 * 1024), 1),
    )

View File

@@ -44,4 +44,4 @@ class BackendConfig(BaseModel):
@lru_cache(maxsize=1)
def load_config() -> BackendConfig:
# on met en cache pour éviter de recharger le fichier à chaque requête
return BackendConfig.parse_file(CONFIG_PATH)
return BackendConfig.model_validate_json(CONFIG_PATH.read_text(encoding="utf-8"))

View File

@@ -1,5 +1,7 @@
from __future__ import annotations
from datetime import datetime, timedelta
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger
from loguru import logger
@@ -17,12 +19,49 @@ def start_scheduler() -> None:
config = load_config()
interval = config.scrape.interval_minutes
# Premier run après l'intervalle défini (pas immédiatement au démarrage)
first_run = datetime.now() + timedelta(minutes=interval)
scheduler.add_job(
scrape_all,
trigger=IntervalTrigger(minutes=interval),
id="scheduled-scrape-all",
replace_existing=True,
next_run_time=None,
next_run_time=first_run,
)
scheduler.start()
logger.info("Scheduler démarré avec un intervalle de %s minutes", interval)
logger.info("Scheduler démarré avec un intervalle de {} minutes (prochain run: {})", interval, first_run.strftime("%H:%M:%S"))
def get_scheduler_status() -> dict:
    """Return a snapshot of the background scrape scheduler's state."""
    job = scheduler.get_job("scheduled-scrape-all")
    result = {
        "running": scheduler.running,
        "interval_minutes": load_config().scrape.interval_minutes,
        "job_exists": job is not None,
        "next_run_time": None,
        "next_run_in_minutes": None,
    }
    next_run = job.next_run_time if job else None
    if next_run:
        result["next_run_time"] = next_run.isoformat()
        # Remaining time until the next run, using the job's own timezone.
        remaining = next_run - datetime.now(next_run.tzinfo)
        result["next_run_in_minutes"] = round(remaining.total_seconds() / 60, 1)
    return result
def trigger_next_run() -> dict:
    """Reschedule the periodic scrape job so it fires immediately."""
    if scheduler.get_job("scheduled-scrape-all") is None:
        return {"success": False, "error": "Job non trouvé"}
    # Move the job's next fire time to now; the interval trigger resumes after.
    scheduler.modify_job("scheduled-scrape-all", next_run_time=datetime.now())
    logger.info("Prochain scrape programmé pour maintenant")
    return {"success": True, "message": "Scrape programmé pour exécution immédiate"}

View File

@@ -63,6 +63,7 @@ def create_product_with_snapshot(
"description",
"carateristique",
"details",
"categorie_amazon",
]
snapshot_data = {k: data_dict.pop(k) for k in snapshot_fields if k in data_dict}
@@ -118,15 +119,23 @@ def remove_product(db: Session, product: models.Product) -> None:
db.commit()
def list_snapshots(db: Session, product_id: int, limit: int = 30) -> list[models.ProductSnapshot]:
return (
db.query(models.ProductSnapshot)
.filter(models.ProductSnapshot.produit_id == product_id)
.order_by(models.ProductSnapshot.scrape_le.desc())
.limit(limit)
.all()
def list_snapshots(
    db: Session, product_id: int, days: int | None = None, limit: int = 1000
) -> list[models.ProductSnapshot]:
    """Return a product's snapshots, newest first.

    When *days* is a positive number, only snapshots scraped within the
    last *days* days are returned; *limit* caps the result size.
    """
    from datetime import datetime, timedelta

    snapshots = db.query(models.ProductSnapshot).filter(
        models.ProductSnapshot.produit_id == product_id
    )
    if days is not None and days > 0:
        threshold = datetime.utcnow() - timedelta(days=days)
        snapshots = snapshots.filter(models.ProductSnapshot.scrape_le >= threshold)
    return (
        snapshots.order_by(models.ProductSnapshot.scrape_le.desc())
        .limit(limit)
        .all()
    )
def get_latest_snapshot(db: Session, product_id: int) -> models.ProductSnapshot | None:
return (
@@ -215,6 +224,7 @@ def _enrich_product_with_snapshot(db: Session, product: models.Product) -> dict:
"description": snapshot.description,
"carateristique": carateristique,
"details": details,
"categorie_amazon": snapshot.categorie_amazon,
"dernier_scrape": snapshot.scrape_le,
"statut_scrap": snapshot.statut_scrap,
}

View File

@@ -24,7 +24,7 @@ class Product(Base):
cree_le = Column(DateTime, default=datetime.utcnow)
modifie_le = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
snapshots = relationship("ProductSnapshot", back_populates="product")
snapshots = relationship("ProductSnapshot", back_populates="product", cascade="all, delete-orphan")
class ScrapeRun(Base):
@@ -67,6 +67,7 @@ class ProductSnapshot(Base):
description = Column(Text, nullable=True)
carateristique = Column(Text, nullable=True) # JSON object
details = Column(Text, nullable=True) # JSON object
categorie_amazon = Column(Text, nullable=True) # Catégorie depuis breadcrumb Amazon
chemin_json_brut = Column(Text, nullable=True)
statut_scrap = Column(String(32), default="ok")
message_erreur = Column(Text, nullable=True)

View File

@@ -1,9 +1,10 @@
from __future__ import annotations
import json
from datetime import datetime
from typing import Any, Optional
from pydantic import BaseModel, HttpUrl
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator
class ProductBase(BaseModel):
@@ -30,13 +31,12 @@ class ProductUpdate(BaseModel):
class ProductRead(ProductBase):
model_config = ConfigDict(from_attributes=True)
id: int
cree_le: datetime
modifie_le: datetime
class Config:
orm_mode = True
class ProductSnapshotBase(BaseModel):
prix_actuel: Optional[float]
@@ -55,22 +55,48 @@ class ProductSnapshotBase(BaseModel):
description: Optional[str] = None
carateristique: Optional[dict[str, Any]] = None
details: Optional[dict[str, Any]] = None
categorie_amazon: Optional[str] = None
statut_scrap: Optional[str]
message_erreur: Optional[str]
class ProductSnapshotRead(ProductSnapshotBase):
model_config = ConfigDict(from_attributes=True)
id: int
produit_id: int
scrape_le: datetime
class Config:
orm_mode = True
@field_validator("a_propos", mode="before")
@classmethod
def parse_a_propos(cls, v: Any) -> list[str] | None:
    """Decode the JSON-encoded 'a_propos' column into a list.

    Non-string values (already parsed, or None) pass through; invalid
    JSON yields None instead of raising.
    """
    if isinstance(v, str):
        try:
            return json.loads(v)
        except json.JSONDecodeError:
            return None
    return v
@field_validator("carateristique", "details", mode="before")
@classmethod
def parse_json_dict(cls, v: Any) -> dict[str, Any] | None:
    """Decode JSON-object columns stored as text.

    Non-string values (already parsed, or None) pass through; invalid
    JSON yields None instead of raising.
    """
    if not isinstance(v, str):
        return v
    try:
        return json.loads(v)
    except json.JSONDecodeError:
        return None
class ProductWithSnapshot(ProductBase):
"""Produit enrichi avec les données du dernier snapshot."""
model_config = ConfigDict(from_attributes=True)
id: int
cree_le: datetime
modifie_le: datetime
@@ -92,12 +118,10 @@ class ProductWithSnapshot(ProductBase):
description: Optional[str] = None
carateristique: Optional[dict[str, Any]] = None
details: Optional[dict[str, Any]] = None
categorie_amazon: Optional[str] = None
dernier_scrape: Optional[datetime] = None
statut_scrap: Optional[str] = None
class Config:
orm_mode = True
class ProductCreateWithSnapshot(ProductBase):
"""Création d'un produit avec données de snapshot initiales (depuis preview)."""
@@ -119,3 +143,4 @@ class ProductCreateWithSnapshot(ProductBase):
description: Optional[str] = None
carateristique: Optional[dict[str, Any]] = None
details: Optional[dict[str, Any]] = None
categorie_amazon: Optional[str] = None

View File

@@ -6,7 +6,7 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from dotenv import load_dotenv
from backend.app.api import routes_config, routes_debug, routes_products, routes_scrape
from backend.app.api import routes_config, routes_debug, routes_products, routes_scrape, routes_stats
from backend.app.core.logging import logger
from backend.app.core.scheduler import start_scheduler
from backend.app.db.database import Base, engine
@@ -15,19 +15,26 @@ load_dotenv()
app = FastAPI(title="suivi_produit")
app_env = getenv("APP_ENV", "development")
# CORS pour le frontend
app.add_middleware(
CORSMiddleware,
allow_origins=["http://localhost:5173", "http://127.0.0.1:5173"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
cors_kwargs = {
"allow_credentials": True,
"allow_methods": ["*"],
"allow_headers": ["*"],
}
if app_env == "development":
cors_kwargs["allow_origin_regex"] = r"https?://(localhost|127\.0\.0\.1|10\.0\.1\.109)(:\d+)?"
else:
cors_kwargs["allow_origins"] = ["http://localhost:5173", "http://127.0.0.1:5173"]
app.add_middleware(CORSMiddleware, **cors_kwargs)
app.include_router(routes_products.router)
app.include_router(routes_scrape.router)
app.include_router(routes_config.router)
app.include_router(routes_debug.router)
app.include_router(routes_stats.router)
@app.on_event("startup")

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

Some files were not shown because too many files have changed in this diff Show More