pounce/backend/app/db_migrations.py
"""
Lightweight, idempotent DB migrations.
This project historically used `Base.metadata.create_all()` for bootstrapping new installs.
That does NOT handle schema evolution on existing databases. For performance-related changes
(indexes, new optional columns), we apply additive migrations on startup.
Important:
- Only additive changes (ADD COLUMN / CREATE INDEX) should live here.
- Operations must be idempotent (safe to run on every startup).
"""
from __future__ import annotations
import logging
from typing import Any
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection
logger = logging.getLogger(__name__)
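
# Typical startup wiring, shown here only as an illustrative sketch: the actual
# hook lives in the application's startup code, and `engine` / `Base` are assumed
# names from the project's database module, not defined in this file.
#
#     async with engine.begin() as conn:
#         await conn.run_sync(Base.metadata.create_all)  # bootstrap new installs
#         await apply_migrations(conn)                    # evolve existing DBs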

async def _sqlite_table_exists(conn: AsyncConnection, table: str) -> bool:
    res = await conn.execute(
        text("SELECT 1 FROM sqlite_master WHERE type='table' AND name=:name LIMIT 1"),
        {"name": table},
    )
    return res.scalar() is not None


async def _sqlite_has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    # NOTE: PRAGMA statements cannot take bound parameters; callers only pass
    # internal (hard-coded) table names, never user input.
    res = await conn.execute(text(f"PRAGMA table_info({table})"))
    rows = res.fetchall()
    # PRAGMA table_info: (cid, name, type, notnull, dflt_value, pk)
    return any(r[1] == column for r in rows)

async def _postgres_table_exists(conn: AsyncConnection, table: str) -> bool:
    # to_regclass returns NULL if the relation does not exist
    res = await conn.execute(text("SELECT to_regclass(:name)"), {"name": table})
    return res.scalar() is not None


async def _postgres_has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    res = await conn.execute(
        text(
            """
            SELECT 1
            FROM information_schema.columns
            WHERE table_schema = current_schema()
              AND table_name = :table
              AND column_name = :column
            LIMIT 1
            """
        ),
        {"table": table, "column": column},
    )
    return res.scalar() is not None
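
# Dialect dispatch: SQLite uses the sqlite_master / PRAGMA probes above; every
# other dialect is assumed to be PostgreSQL and uses the information_schema /
# to_regclass probes.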
async def _table_exists(conn: AsyncConnection, table: str) -> bool:
    dialect = conn.engine.dialect.name
    if dialect == "sqlite":
        return await _sqlite_table_exists(conn, table)
    return await _postgres_table_exists(conn, table)


async def _has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    dialect = conn.engine.dialect.name
    if dialect == "sqlite":
        return await _sqlite_has_column(conn, table, column)
    return await _postgres_has_column(conn, table, column)

async def apply_migrations(conn: AsyncConnection) -> None:
    """
    Apply idempotent migrations.

    Called on startup after `create_all()` to keep existing DBs up-to-date.
    """
    dialect = conn.engine.dialect.name
    logger.info("DB migrations: starting (dialect=%s)", dialect)

    # ------------------------------------------------------------------
    # 1) domain_auctions.pounce_score (enables DB-level sorting/pagination)
    # ------------------------------------------------------------------
    if await _table_exists(conn, "domain_auctions"):
        if not await _has_column(conn, "domain_auctions", "pounce_score"):
            logger.info("DB migrations: adding column domain_auctions.pounce_score")
            await conn.execute(text("ALTER TABLE domain_auctions ADD COLUMN pounce_score INTEGER"))
        # Index for feed ordering
        await conn.execute(
            text("CREATE INDEX IF NOT EXISTS ix_domain_auctions_pounce_score ON domain_auctions(pounce_score)")
        )

    # ---------------------------------------------------------
    # 2) domain_checks index for history queries (watchlist UI)
    # ---------------------------------------------------------
    if await _table_exists(conn, "domain_checks"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_checks_domain_id_checked_at "
                "ON domain_checks(domain_id, checked_at)"
            )
        )

    # ---------------------------------------------------
    # 3) tld_prices composite index for trend computations
    # ---------------------------------------------------
    if await _table_exists(conn, "tld_prices"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_tld_prices_tld_registrar_recorded_at "
                "ON tld_prices(tld, registrar, recorded_at)"
            )
        )

    # --------------------------------------------------------------------
    # 4) domain_listings: sold_* columns + pounce_score/status indexes
    #    (market sorting)
    # --------------------------------------------------------------------
    if await _table_exists(conn, "domain_listings"):
        if not await _has_column(conn, "domain_listings", "sold_at"):
            logger.info("DB migrations: adding column domain_listings.sold_at")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_at DATETIME"))
        if not await _has_column(conn, "domain_listings", "sold_reason"):
            logger.info("DB migrations: adding column domain_listings.sold_reason")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_reason VARCHAR(200)"))
        if not await _has_column(conn, "domain_listings", "sold_price"):
            logger.info("DB migrations: adding column domain_listings.sold_price")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_price FLOAT"))
        if not await _has_column(conn, "domain_listings", "sold_currency"):
            logger.info("DB migrations: adding column domain_listings.sold_currency")
            await conn.execute(text("ALTER TABLE domain_listings ADD COLUMN sold_currency VARCHAR(3)"))
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_listings_pounce_score "
                "ON domain_listings(pounce_score)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_listings_status "
                "ON domain_listings(status)"
            )
        )

    # ----------------------------------------------------
    # 4b) listing_inquiries: deal workflow + audit trail
    # ----------------------------------------------------
    if await _table_exists(conn, "listing_inquiries"):
        if not await _has_column(conn, "listing_inquiries", "buyer_user_id"):
            logger.info("DB migrations: adding column listing_inquiries.buyer_user_id")
            await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN buyer_user_id INTEGER"))
        if not await _has_column(conn, "listing_inquiries", "closed_at"):
            logger.info("DB migrations: adding column listing_inquiries.closed_at")
            await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN closed_at DATETIME"))
        if not await _has_column(conn, "listing_inquiries", "closed_reason"):
            logger.info("DB migrations: adding column listing_inquiries.closed_reason")
            await conn.execute(text("ALTER TABLE listing_inquiries ADD COLUMN closed_reason VARCHAR(200)"))
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_listing_created "
                "ON listing_inquiries(listing_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_listing_status "
                "ON listing_inquiries(listing_id, status)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiries_buyer_user "
                "ON listing_inquiries(buyer_user_id)"
            )
        )

    # The table itself is created by `Base.metadata.create_all()` on startup.
    # Here we only add indexes (idempotent) for existing DBs.
    if await _table_exists(conn, "listing_inquiry_events"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_events_inquiry_created "
                "ON listing_inquiry_events(inquiry_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_events_listing_created "
                "ON listing_inquiry_events(listing_id, created_at)"
            )
        )

    if await _table_exists(conn, "listing_inquiry_messages"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_inquiry_created "
                "ON listing_inquiry_messages(inquiry_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_listing_created "
                "ON listing_inquiry_messages(listing_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_listing_inquiry_messages_sender_created "
                "ON listing_inquiry_messages(sender_user_id, created_at)"
            )
        )

    # ----------------------------------------------------
    # 5) Yield tables indexes
    # ----------------------------------------------------
    if await _table_exists(conn, "yield_domains"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_domains_user_status "
                "ON yield_domains(user_id, status)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_domains_domain "
                "ON yield_domains(domain)"
            )
        )
        if not await _has_column(conn, "yield_domains", "connected_at"):
            logger.info("DB migrations: adding column yield_domains.connected_at")
            await conn.execute(text("ALTER TABLE yield_domains ADD COLUMN connected_at DATETIME"))

    if await _table_exists(conn, "yield_transactions"):
        if not await _has_column(conn, "yield_transactions", "click_id"):
            logger.info("DB migrations: adding column yield_transactions.click_id")
            await conn.execute(text("ALTER TABLE yield_transactions ADD COLUMN click_id VARCHAR(64)"))
        await conn.execute(text("CREATE INDEX IF NOT EXISTS ix_yield_transactions_click_id ON yield_transactions(click_id)"))
        if not await _has_column(conn, "yield_transactions", "destination_url"):
            logger.info("DB migrations: adding column yield_transactions.destination_url")
            await conn.execute(text("ALTER TABLE yield_transactions ADD COLUMN destination_url TEXT"))
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_tx_domain_created "
                "ON yield_transactions(yield_domain_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_tx_status_created "
                "ON yield_transactions(status, created_at)"
            )
        )

    if await _table_exists(conn, "yield_payouts"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_yield_payouts_user_status "
                "ON yield_payouts(user_id, status)"
            )
        )

    # ----------------------------------------------------
    # 6) Referral rewards: subscriptions.referral_bonus_domains (3C.2)
    # ----------------------------------------------------
    if await _table_exists(conn, "subscriptions"):
        if not await _has_column(conn, "subscriptions", "referral_bonus_domains"):
            logger.info("DB migrations: adding column subscriptions.referral_bonus_domains")
            await conn.execute(
                text(
                    "ALTER TABLE subscriptions "
                    "ADD COLUMN referral_bonus_domains INTEGER NOT NULL DEFAULT 0"
                )
            )

    # ----------------------------------------------------
    # 7) Telemetry events indexes
    # ----------------------------------------------------
    if await _table_exists(conn, "telemetry_events"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_event_name_created "
                "ON telemetry_events(event_name, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_user_created "
                "ON telemetry_events(user_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_listing_created "
                "ON telemetry_events(listing_id, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_telemetry_yield_created "
                "ON telemetry_events(yield_domain_id, created_at)"
            )
        )

    # ----------------------------------------------------
    # 7b) Ops alert events (persisted cooldown + history)
    # ----------------------------------------------------
    # NOTE: Table is created by Base.metadata.create_all() for new installs.
    # Here we ensure indexes exist for older DBs.
    if await _table_exists(conn, "ops_alert_events"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_ops_alert_key_created "
                "ON ops_alert_events(alert_key, created_at)"
            )
        )
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_ops_alert_status_created "
                "ON ops_alert_events(status, created_at)"
            )
        )

    # ----------------------------------------------------
    # 8) User referral tracking columns
    # ----------------------------------------------------
    if await _table_exists(conn, "users"):
        if not await _has_column(conn, "users", "referred_by_user_id"):
            logger.info("DB migrations: adding column users.referred_by_user_id")
            await conn.execute(text("ALTER TABLE users ADD COLUMN referred_by_user_id INTEGER"))
        if not await _has_column(conn, "users", "referred_by_domain"):
            logger.info("DB migrations: adding column users.referred_by_domain")
            await conn.execute(text("ALTER TABLE users ADD COLUMN referred_by_domain VARCHAR(255)"))
        if not await _has_column(conn, "users", "referral_code"):
            logger.info("DB migrations: adding column users.referral_code")
            await conn.execute(text("ALTER TABLE users ADD COLUMN referral_code VARCHAR(100)"))
        if not await _has_column(conn, "users", "invite_code"):
            logger.info("DB migrations: adding column users.invite_code")
            await conn.execute(text("ALTER TABLE users ADD COLUMN invite_code VARCHAR(32)"))
        # Unique index for invite_code (SQLite + Postgres)
        await conn.execute(text("CREATE UNIQUE INDEX IF NOT EXISTS ix_users_invite_code ON users(invite_code)"))

    # ----------------------------------------------------
    # 9) Portfolio DNS verification columns
    # ----------------------------------------------------
    if await _table_exists(conn, "portfolio_domains"):
        if not await _has_column(conn, "portfolio_domains", "is_dns_verified"):
            logger.info("DB migrations: adding column portfolio_domains.is_dns_verified")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN is_dns_verified BOOLEAN DEFAULT 0"))
        if not await _has_column(conn, "portfolio_domains", "verification_status"):
            logger.info("DB migrations: adding column portfolio_domains.verification_status")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verification_status VARCHAR(50) DEFAULT 'unverified'"))
        if not await _has_column(conn, "portfolio_domains", "verification_code"):
            logger.info("DB migrations: adding column portfolio_domains.verification_code")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verification_code VARCHAR(100)"))
        if not await _has_column(conn, "portfolio_domains", "verification_started_at"):
            logger.info("DB migrations: adding column portfolio_domains.verification_started_at")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verification_started_at DATETIME"))
        if not await _has_column(conn, "portfolio_domains", "verified_at"):
            logger.info("DB migrations: adding column portfolio_domains.verified_at")
            await conn.execute(text("ALTER TABLE portfolio_domains ADD COLUMN verified_at DATETIME"))

    logger.info("DB migrations: done")