codex
This commit is contained in:
BIN
pricewatch/app/db/migrations/__pycache__/env.cpython-313.pyc
Executable file
BIN
pricewatch/app/db/migrations/__pycache__/env.cpython-313.pyc
Executable file
Binary file not shown.
80
pricewatch/app/db/migrations/env.py
Executable file
80
pricewatch/app/db/migrations/env.py
Executable file
"""
Alembic configuration for PriceWatch.

Retrieves the DB URL from AppConfig to guarantee a consistent setup.
"""

from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from pricewatch.app.core.config import get_config
from pricewatch.app.db.models import Base

# Alembic Config object (proxy for values in alembic.ini).
config = context.config

# Configure Python logging from the ini file, when one is in use.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# SQLAlchemy metadata used by `alembic revision --autogenerate`.
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def _get_database_url() -> str:
    """Build the database URL from the application config.

    Returns:
        The SQLAlchemy connection URL exposed by ``AppConfig.db.url``.
    """
    app_config = get_config()
    return app_config.db.url
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """
    Run migrations in offline mode.

    Configures the context with the DB URL without creating an engine;
    Alembic emits the SQL as literal statements instead of executing them.
    """
    url = _get_database_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,  # render bound parameters inline in the SQL script
        dialect_opts={"paramstyle": "named"},
        compare_type=True,  # detect column type changes during autogenerate
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """
    Run migrations in online mode.

    Creates a SQLAlchemy engine and establishes a connection, then runs
    the migrations against the live database.
    """
    configuration = config.get_section(config.config_ini_section) or {}
    # Override any URL from alembic.ini with the application's own DB URL
    # so env.py and the app always agree on the target database.
    configuration["sqlalchemy.url"] = _get_database_url()

    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # migrations need no connection pooling
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,  # detect column type changes during autogenerate
        )

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic imports this module directly, so dispatch at import time.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
24
pricewatch/app/db/migrations/script.py.mako
Executable file
24
pricewatch/app/db/migrations/script.py.mako
Executable file
@@ -0,0 +1,24 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# Revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
124
pricewatch/app/db/migrations/versions/20260114_01_initial_schema.py
Executable file
124
pricewatch/app/db/migrations/versions/20260114_01_initial_schema.py
Executable file
@@ -0,0 +1,124 @@
|
||||
"""Initial schema
|
||||
|
||||
Revision ID: 20260114_01
|
||||
Revises: None
|
||||
Create Date: 2026-01-14 00:00:00
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# Revision identifiers, used by Alembic.
|
||||
revision = "20260114_01"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the initial PriceWatch schema.

    Tables: products, price_history, product_images, product_specs,
    scraping_logs — plus their indexes and integrity constraints.
    """
    # Core product identity; (source, reference) uniquely identifies a product.
    op.create_table(
        "products",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("source", sa.String(length=50), nullable=False),
        sa.Column("reference", sa.String(length=100), nullable=False),
        sa.Column("url", sa.Text(), nullable=False),
        sa.Column("title", sa.Text(), nullable=True),
        sa.Column("category", sa.Text(), nullable=True),
        sa.Column("currency", sa.String(length=3), nullable=True),
        sa.Column("first_seen_at", sa.TIMESTAMP(), nullable=False),
        sa.Column("last_updated_at", sa.TIMESTAMP(), nullable=False),
        sa.UniqueConstraint("source", "reference", name="uq_product_source_reference"),
    )
    op.create_index("ix_product_source", "products", ["source"], unique=False)
    op.create_index("ix_product_reference", "products", ["reference"], unique=False)
    op.create_index("ix_product_last_updated", "products", ["last_updated_at"], unique=False)

    # Time series of observed prices; one row per (product, fetch timestamp).
    op.create_table(
        "price_history",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("price", sa.Numeric(10, 2), nullable=True),
        sa.Column("shipping_cost", sa.Numeric(10, 2), nullable=True),
        sa.Column("stock_status", sa.String(length=20), nullable=True),
        sa.Column("fetch_method", sa.String(length=20), nullable=False),
        sa.Column("fetch_status", sa.String(length=20), nullable=False),
        sa.Column("fetched_at", sa.TIMESTAMP(), nullable=False),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"], ondelete="CASCADE"),
        sa.UniqueConstraint("product_id", "fetched_at", name="uq_price_history_product_time"),
        sa.CheckConstraint("stock_status IN ('in_stock', 'out_of_stock', 'unknown')"),
        sa.CheckConstraint("fetch_method IN ('http', 'playwright')"),
        sa.CheckConstraint("fetch_status IN ('success', 'partial', 'failed')"),
    )
    op.create_index("ix_price_history_product_id", "price_history", ["product_id"], unique=False)
    op.create_index("ix_price_history_fetched_at", "price_history", ["fetched_at"], unique=False)

    # Ordered image URLs per product; duplicates per product are rejected.
    op.create_table(
        "product_images",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("image_url", sa.Text(), nullable=False),
        sa.Column("position", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"], ondelete="CASCADE"),
        sa.UniqueConstraint("product_id", "image_url", name="uq_product_image_url"),
    )
    op.create_index("ix_product_image_product_id", "product_images", ["product_id"], unique=False)

    # Key/value technical specifications, one key per product.
    op.create_table(
        "product_specs",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("product_id", sa.Integer(), nullable=False),
        sa.Column("spec_key", sa.String(length=200), nullable=False),
        sa.Column("spec_value", sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"], ondelete="CASCADE"),
        sa.UniqueConstraint("product_id", "spec_key", name="uq_product_spec_key"),
    )
    op.create_index("ix_product_spec_product_id", "product_specs", ["product_id"], unique=False)
    op.create_index("ix_product_spec_key", "product_specs", ["spec_key"], unique=False)

    # Audit log of scraping attempts; product link survives product deletion
    # (ondelete="SET NULL") so history is kept even for removed products.
    op.create_table(
        "scraping_logs",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("product_id", sa.Integer(), nullable=True),
        sa.Column("url", sa.Text(), nullable=False),
        sa.Column("source", sa.String(length=50), nullable=False),
        sa.Column("reference", sa.String(length=100), nullable=True),
        sa.Column("fetch_method", sa.String(length=20), nullable=False),
        sa.Column("fetch_status", sa.String(length=20), nullable=False),
        sa.Column("fetched_at", sa.TIMESTAMP(), nullable=False),
        sa.Column("duration_ms", sa.Integer(), nullable=True),
        sa.Column("html_size_bytes", sa.Integer(), nullable=True),
        sa.Column("errors", postgresql.JSONB(), nullable=True),
        sa.Column("notes", postgresql.JSONB(), nullable=True),
        sa.ForeignKeyConstraint(["product_id"], ["products.id"], ondelete="SET NULL"),
        sa.CheckConstraint("fetch_method IN ('http', 'playwright')"),
        sa.CheckConstraint("fetch_status IN ('success', 'partial', 'failed')"),
    )
    op.create_index("ix_scraping_log_product_id", "scraping_logs", ["product_id"], unique=False)
    op.create_index("ix_scraping_log_source", "scraping_logs", ["source"], unique=False)
    op.create_index("ix_scraping_log_fetched_at", "scraping_logs", ["fetched_at"], unique=False)
    op.create_index("ix_scraping_log_fetch_status", "scraping_logs", ["fetch_status"], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop the initial schema.

    Tables are dropped in reverse dependency order (children before
    ``products``), with each table's indexes removed first.
    """
    op.drop_index("ix_scraping_log_fetch_status", table_name="scraping_logs")
    op.drop_index("ix_scraping_log_fetched_at", table_name="scraping_logs")
    op.drop_index("ix_scraping_log_source", table_name="scraping_logs")
    op.drop_index("ix_scraping_log_product_id", table_name="scraping_logs")
    op.drop_table("scraping_logs")

    op.drop_index("ix_product_spec_key", table_name="product_specs")
    op.drop_index("ix_product_spec_product_id", table_name="product_specs")
    op.drop_table("product_specs")

    op.drop_index("ix_product_image_product_id", table_name="product_images")
    op.drop_table("product_images")

    op.drop_index("ix_price_history_fetched_at", table_name="price_history")
    op.drop_index("ix_price_history_product_id", table_name="price_history")
    op.drop_table("price_history")

    op.drop_index("ix_product_last_updated", table_name="products")
    op.drop_index("ix_product_reference", table_name="products")
    op.drop_index("ix_product_source", table_name="products")
    op.drop_table("products")
|
||||
Binary file not shown.
Reference in New Issue
Block a user