pounce/backend/alembic/versions/005_add_auction_tables.py
yves.gugger 88eca582e5 feat: Remove ALL mock data - real scraped data only
MOCK DATA REMOVED:
- Removed ALL hardcoded auction data from auctions.py
- Now uses real-time scraping from ExpiredDomains.net
- Database stores scraped auctions (domain_auctions table)
- Scraping runs hourly via scheduler (:30 each hour)
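
A minimal sketch of that hourly trigger, assuming APScheduler is the scheduler in use (the scheduling library is not shown in this commit) and a hypothetical scrape_all_platforms coroutine:

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger

from app.services.auction_scraper import scrape_all_platforms  # hypothetical import path

scheduler = AsyncIOScheduler()

# Fire at minute 30 of every hour, matching the ":30 each hour" schedule above.
scheduler.add_job(scrape_all_platforms, CronTrigger(minute=30), id="auction_scrape")
scheduler.start()  # called from the app's startup hook so an event loop is running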

AUCTION SCRAPER SERVICE:
- Web scraping from ExpiredDomains.net (aggregator)
- Rate limiting per platform (10 req/min); sketched after this list
- Database caching to minimize requests
- Cleanup of ended auctions (auto-deactivate)
- Scrape logging for monitoring
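
The per-platform limit could be enforced with a small sliding-window limiter; a sketch with hypothetical names, not the service's actual code:

import asyncio
import time
from collections import defaultdict, deque


class PlatformRateLimiter:
    """Allow at most max_requests per window seconds, tracked per platform."""

    def __init__(self, max_requests: int = 10, window: float = 60.0):
        self.max_requests = max_requests
        self.window = window
        self._stamps: dict[str, deque] = defaultdict(deque)

    async def acquire(self, platform: str) -> None:
        while True:
            now = time.monotonic()
            stamps = self._stamps[platform]
            # Drop request timestamps that have aged out of the window.
            while stamps and now - stamps[0] > self.window:
                stamps.popleft()
            if len(stamps) < self.max_requests:
                stamps.append(now)
                return
            # Wait until the oldest request leaves the window, then re-check.
            await asyncio.sleep(self.window - (now - stamps[0]))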

STRIPE INTEGRATION:
- Full payment flow: Checkout → Webhook → Subscription update
- Customer Portal for managing subscriptions
- Price IDs configurable via env vars
- Handles: checkout.session.completed, customer.subscription.updated/deleted, invoice.payment_failed
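
A hedged sketch of the webhook endpoint, assuming a FastAPI backend and the official stripe library; the route path and the handler bodies are illustrative only:

import os

import stripe
from fastapi import APIRouter, Header, HTTPException, Request

router = APIRouter()
stripe.api_key = os.environ["STRIPE_SECRET_KEY"]


@router.post("/webhooks/stripe")  # illustrative path
async def stripe_webhook(request: Request, stripe_signature: str | None = Header(None)):
    payload = await request.body()
    try:
        # Verify the signature before trusting the payload.
        event = stripe.Webhook.construct_event(
            payload, stripe_signature, os.environ["STRIPE_WEBHOOK_SECRET"]
        )
    except Exception:  # signature check or payload parsing failed
        raise HTTPException(status_code=400, detail="Invalid Stripe webhook")

    obj = event["data"]["object"]
    if event["type"] == "checkout.session.completed":
        ...  # look up the user by obj["customer"] and activate the purchased tier
    elif event["type"] in ("customer.subscription.updated", "customer.subscription.deleted"):
        ...  # sync the local subscription row with obj["status"]
    elif event["type"] == "invoice.payment_failed":
        ...  # flag the subscription and notify the user
    return {"received": True}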

EMAIL SERVICE (SMTP):
- Beautiful HTML email templates with pounce branding
- Domain available alerts
- Price change notifications
- Subscription confirmations
- Weekly digest emails
- Configurable via SMTP_* env vars
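
A minimal sketch of sending one of these notifications over SMTP with the standard library; the helper name and the inline template are illustrative, not the service's actual code:

import os
import smtplib
from email.message import EmailMessage


def send_domain_available_alert(to_address: str, domain: str) -> None:
    msg = EmailMessage()
    msg["Subject"] = f"{domain} is now available"
    msg["From"] = f'{os.environ["SMTP_FROM_NAME"]} <{os.environ["SMTP_FROM_EMAIL"]}>'
    msg["To"] = to_address
    msg.set_content(f"{domain} just became available to register.")
    # HTML alternative; the real templates carry the pounce branding.
    msg.add_alternative(f"<h1>{domain}</h1><p>is now available.</p>", subtype="html")

    with smtplib.SMTP(os.environ["SMTP_HOST"], int(os.environ["SMTP_PORT"])) as smtp:
        smtp.starttls()
        smtp.login(os.environ["SMTP_USER"], os.environ["SMTP_PASSWORD"])
        smtp.send_message(msg)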

NEW SUBSCRIPTION TIERS:
- Scout (Free): 5 domains, daily checks
- Trader (€19/mo): 50 domains, hourly, portfolio, valuation
- Tycoon (€49/mo): 500+ domains, realtime, API, bulk tools
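
One way to keep these limits enforceable in code is a single mapping the API checks against; the structure below is an assumption, only the numbers come from this commit:

SUBSCRIPTION_TIERS = {
    "scout": {"price_eur": 0, "max_domains": 5, "check_interval": "daily"},
    "trader": {"price_eur": 19, "max_domains": 50, "check_interval": "hourly",
               "portfolio": True, "valuation": True},
    "tycoon": {"price_eur": 49, "max_domains": 500, "check_interval": "realtime",  # advertised as 500+
               "api_access": True, "bulk_tools": True},
}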

DATABASE CHANGES:
- domain_auctions table for scraped data (example query after this list)
- auction_scrape_logs for monitoring
- stripe_customer_id on users
- stripe_subscription_id on subscriptions
- portfolio_domain relationships fixed
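
For orientation, a hedged example of how the new table is typically read, assuming a DomainAuction ORM model mapped to domain_auctions (the model itself is not part of this migration); the composite (end_time, is_active) index in the migration below serves exactly this filter:

from datetime import datetime

from sqlalchemy import select
from sqlalchemy.orm import Session

from app.models import DomainAuction  # hypothetical import path for the ORM model


def active_auctions_ending_soon(session: Session, limit: int = 50):
    # Live auctions that have not ended yet, soonest-ending first.
    stmt = (
        select(DomainAuction)
        .where(DomainAuction.is_active.is_(True))
        .where(DomainAuction.end_time > datetime.utcnow())
        .order_by(DomainAuction.end_time)
        .limit(limit)
    )
    return session.scalars(stmt).all()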

ENV VARS ADDED:
- STRIPE_SECRET_KEY, STRIPE_WEBHOOK_SECRET
- STRIPE_PRICE_TRADER, STRIPE_PRICE_TYCOON
- SMTP_HOST, SMTP_PORT, SMTP_USER, SMTP_PASSWORD
- SMTP_FROM_EMAIL, SMTP_FROM_NAME
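
How these variables are loaded is not shown in this commit; one common pattern for a FastAPI backend is a pydantic-settings class, sketched here as an assumption:

from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    # Stripe
    stripe_secret_key: str = ""
    stripe_webhook_secret: str = ""
    stripe_price_trader: str = ""
    stripe_price_tycoon: str = ""
    # SMTP
    smtp_host: str = "localhost"
    smtp_port: int = 587
    smtp_user: str = ""
    smtp_password: str = ""
    smtp_from_email: str = "noreply@example.com"
    smtp_from_name: str = "pounce"


settings = Settings()  # field names match the env var names case-insensitively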
2025-12-08 14:08:52 +01:00


"""Add auction tables for scraped auction data
Revision ID: 005
Revises: 004
Create Date: 2025-12-08
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '005'
down_revision = '004'
branch_labels = None
depends_on = None
def upgrade() -> None:
    # Create domain_auctions table
    op.create_table(
        'domain_auctions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('domain', sa.String(length=255), nullable=False),
        sa.Column('tld', sa.String(length=50), nullable=False),
        sa.Column('platform', sa.String(length=100), nullable=False),
        sa.Column('platform_auction_id', sa.String(length=255), nullable=True),
        sa.Column('auction_url', sa.Text(), nullable=False),
        sa.Column('current_bid', sa.Float(), nullable=False),
        sa.Column('currency', sa.String(length=10), nullable=True, default='USD'),
        sa.Column('min_bid', sa.Float(), nullable=True),
        sa.Column('buy_now_price', sa.Float(), nullable=True),
        sa.Column('reserve_price', sa.Float(), nullable=True),
        sa.Column('reserve_met', sa.Boolean(), nullable=True),
        sa.Column('num_bids', sa.Integer(), nullable=True, default=0),
        sa.Column('num_watchers', sa.Integer(), nullable=True),
        sa.Column('end_time', sa.DateTime(), nullable=False),
        sa.Column('auction_type', sa.String(length=50), nullable=True, default='auction'),
        sa.Column('traffic', sa.Integer(), nullable=True),
        sa.Column('age_years', sa.Integer(), nullable=True),
        sa.Column('backlinks', sa.Integer(), nullable=True),
        sa.Column('domain_authority', sa.Integer(), nullable=True),
        sa.Column('scraped_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True, default=True),
        sa.Column('scrape_source', sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Create indexes for domain_auctions
    op.create_index('ix_domain_auctions_domain', 'domain_auctions', ['domain'], unique=False)
    op.create_index('ix_domain_auctions_tld', 'domain_auctions', ['tld'], unique=False)
    op.create_index('ix_domain_auctions_platform', 'domain_auctions', ['platform'], unique=False)
    op.create_index('ix_domain_auctions_end_time', 'domain_auctions', ['end_time'], unique=False)
    op.create_index('ix_auctions_platform_domain', 'domain_auctions', ['platform', 'domain'], unique=False)
    op.create_index('ix_auctions_end_time_active', 'domain_auctions', ['end_time', 'is_active'], unique=False)
    op.create_index('ix_auctions_tld_bid', 'domain_auctions', ['tld', 'current_bid'], unique=False)

    # Create auction_scrape_logs table
    op.create_table(
        'auction_scrape_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('platform', sa.String(length=100), nullable=False),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('status', sa.String(length=50), nullable=True, default='running'),
        sa.Column('auctions_found', sa.Integer(), nullable=True, default=0),
        sa.Column('auctions_updated', sa.Integer(), nullable=True, default=0),
        sa.Column('auctions_new', sa.Integer(), nullable=True, default=0),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )

    # Add stripe_customer_id to users table if not exists
    try:
        op.add_column('users', sa.Column('stripe_customer_id', sa.String(length=255), nullable=True))
    except Exception:
        pass  # Column might already exist

    # Add stripe_subscription_id to subscriptions table if not exists
    try:
        op.add_column('subscriptions', sa.Column('stripe_subscription_id', sa.String(length=255), nullable=True))
    except Exception:
        pass  # Column might already exist


def downgrade() -> None:
    # Drop indexes
    op.drop_index('ix_auctions_tld_bid', table_name='domain_auctions')
    op.drop_index('ix_auctions_end_time_active', table_name='domain_auctions')
    op.drop_index('ix_auctions_platform_domain', table_name='domain_auctions')
    op.drop_index('ix_domain_auctions_end_time', table_name='domain_auctions')
    op.drop_index('ix_domain_auctions_platform', table_name='domain_auctions')
    op.drop_index('ix_domain_auctions_tld', table_name='domain_auctions')
    op.drop_index('ix_domain_auctions_domain', table_name='domain_auctions')

    # Drop tables
    op.drop_table('auction_scrape_logs')
    op.drop_table('domain_auctions')

    # Remove columns
    try:
        op.drop_column('users', 'stripe_customer_id')
    except Exception:
        pass
    try:
        op.drop_column('subscriptions', 'stripe_subscription_id')
    except Exception:
        pass