feat: add TLD price export/import/seed scripts
- export_tld_prices.py: Export DB prices to JSON
- import_tld_prices.py: Import prices from JSON
- seed_tld_prices.py: Initial scrape from Porkbun API
- tld_prices_export.json: Current price data (886 TLDs)

Usage:
  python scripts/seed_tld_prices.py    # Initial scrape
  python scripts/export_tld_prices.py  # Backup to JSON
  python scripts/import_tld_prices.py  # Restore from JSON
backend/scripts/export_tld_prices.py (new file, 54 lines)
@@ -0,0 +1,54 @@
#!/usr/bin/env python3
"""Export TLD prices from database to JSON file for backup/versioning."""
import asyncio
import json
import sys
from datetime import datetime
from pathlib import Path

# Add parent directory to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from sqlalchemy import select
from app.database import AsyncSessionLocal, init_db
from app.models.tld_price import TLDPrice


async def export_prices():
    """Export all TLD prices to JSON."""
    await init_db()

    async with AsyncSessionLocal() as db:
        result = await db.execute(select(TLDPrice).order_by(TLDPrice.tld))
        prices = result.scalars().all()

        data = {
            "exported_at": datetime.utcnow().isoformat(),
            "total_records": len(prices),
            "prices": [
                {
                    "tld": p.tld,
                    "registrar": p.registrar,
                    "registration_price": p.registration_price,
                    "renewal_price": p.renewal_price,
                    "transfer_price": p.transfer_price,
                    "currency": p.currency,
                    "promo_price": p.promo_price,
                    "recorded_at": p.recorded_at.isoformat() if p.recorded_at else None,
                }
                for p in prices
            ]
        }

        # Save to file
        output_path = Path(__file__).parent / "tld_prices_export.json"
        with open(output_path, "w") as f:
            json.dump(data, f, indent=2)

        print(f"Exported {len(prices)} TLD prices to {output_path}")
        return output_path


if __name__ == "__main__":
    asyncio.run(export_prices())
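The resulting tld_prices_export.json mirrors the dict built above, so it can be inspected directly once the export has been run. A minimal sketch using the field names from this script; the path assumes the repo root as working directory and the printed values are illustrative:

# Minimal sketch: inspect the file written by export_tld_prices.py.
# Assumes the export has already been run and the working directory is the repo root;
# printed values are illustrative.
import json
from pathlib import Path

export_path = Path("backend/scripts/tld_prices_export.json")
data = json.loads(export_path.read_text())

print(data["exported_at"])      # ISO timestamp of the export
print(data["total_records"])    # e.g. 886
first = data["prices"][0]
print(first["tld"], first["registrar"], first["registration_price"])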
backend/scripts/import_tld_prices.py (new file, 62 lines)
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
"""Import TLD prices from JSON file to database."""
import asyncio
import json
import sys
from datetime import datetime
from pathlib import Path

# Add parent directory to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from app.database import AsyncSessionLocal, init_db
from app.models.tld_price import TLDPrice


async def import_prices(json_path: str = None):
    """Import TLD prices from JSON."""
    await init_db()

    # Default path
    if json_path is None:
        json_path = Path(__file__).parent / "tld_prices_export.json"
    else:
        json_path = Path(json_path)

    if not json_path.exists():
        print(f"File not found: {json_path}")
        return

    with open(json_path) as f:
        data = json.load(f)

    prices = data.get("prices", [])
    print(f"Importing {len(prices)} TLD prices from {json_path}")

    async with AsyncSessionLocal() as db:
        imported = 0
        for p in prices:
            try:
                price = TLDPrice(
                    tld=p["tld"],
                    registrar=p["registrar"],
                    registration_price=p["registration_price"],
                    renewal_price=p.get("renewal_price"),
                    transfer_price=p.get("transfer_price"),
                    currency=p.get("currency", "USD"),
                    promo_price=p.get("promo_price"),
                    recorded_at=datetime.fromisoformat(p["recorded_at"]) if p.get("recorded_at") else datetime.utcnow(),
                )
                db.add(price)
                imported += 1
            except Exception as e:
                print(f"Error importing {p.get('tld')}: {e}")

        await db.commit()
        print(f"Successfully imported {imported} TLD prices")


if __name__ == "__main__":
    json_file = sys.argv[1] if len(sys.argv) > 1 else None
    asyncio.run(import_prices(json_file))
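Since import_prices accepts an optional path and the __main__ block forwards sys.argv[1], a specific backup can be restored instead of the default export. A minimal sketch, assuming it runs from the backend/ directory so that scripts/ resolves as a package; the backup filename is purely illustrative:

# Minimal sketch: restore a named backup rather than the default tld_prices_export.json.
# Assumes the working directory is backend/ so "scripts" is importable;
# the backup filename is hypothetical.
import asyncio

from scripts.import_tld_prices import import_prices

asyncio.run(import_prices("backups/tld_prices_2025-01-01.json"))

# Equivalent from the command line (matching the Usage block above):
#   python scripts/import_tld_prices.py backups/tld_prices_2025-01-01.json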
backend/scripts/seed_tld_prices.py (new file, 46 lines)
@@ -0,0 +1,46 @@
#!/usr/bin/env python3
"""Seed TLD prices by running a scrape from Porkbun API."""
import asyncio
import sys
from pathlib import Path

# Add parent directory to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from app.database import AsyncSessionLocal, init_db
from app.services.tld_scraper.aggregator import tld_aggregator


async def seed_prices():
    """Run initial TLD price scrape."""
    print("Initializing database...")
    await init_db()

    print("Starting TLD price scrape from Porkbun API...")
    print("This may take 15-30 seconds...")

    async with AsyncSessionLocal() as db:
        result = await tld_aggregator.run_scrape(db)

    print()
    print("=" * 50)
    print("Seed Results:")
    print(f"  Status: {result.status}")
    print(f"  TLDs scraped: {result.tlds_scraped}")
    print(f"  Prices saved: {result.prices_saved}")
    print(f"  Sources: {result.sources_succeeded}/{result.sources_attempted}")

    if result.errors:
        print(f"  Errors: {result.errors}")

    print("=" * 50)

    if result.status == "success":
        print("\n✅ TLD prices seeded successfully!")
    else:
        print("\n⚠️ Seed completed with warnings")


if __name__ == "__main__":
    asyncio.run(seed_prices())
backend/scripts/tld_prices_export.json (new file, 8866 lines)
File diff suppressed because it is too large.