New features:
- Email service for domain availability alerts
- Price tracker for detecting significant price changes (>5%)
- Automated email notifications for:
  - Domain becomes available
  - TLD price changes
  - Weekly digest summaries
- New scheduler job for price change alerts (04:00 UTC)

Updated documentation:
- README: email notifications section, new services
- DEPLOYMENT: email setup, troubleshooting, scheduler jobs

New files:
- backend/app/services/email_service.py
- backend/app/services/price_tracker.py
229 lines · 7.5 KiB · Python
"""Background scheduler for domain checks, TLD price scraping, and notifications."""
|
|
import asyncio
|
|
import logging
|
|
from datetime import datetime
|
|
|
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
|
from apscheduler.triggers.cron import CronTrigger
|
|
from sqlalchemy import select
|
|
|
|
from app.config import get_settings
|
|
from app.database import AsyncSessionLocal
|
|
from app.models.domain import Domain, DomainCheck
|
|
from app.models.user import User
|
|
from app.services.domain_checker import domain_checker
|
|
from app.services.email_service import email_service
|
|
from app.services.price_tracker import price_tracker
|
|
|
|
# Module-level logger for all scheduler activity.
logger = logging.getLogger(__name__)

# Application settings; supplies check_hour / check_minute for the daily job.
settings = get_settings()

# Global scheduler instance
# Shared AsyncIOScheduler used by setup/start/stop helpers below.
scheduler = AsyncIOScheduler()
|
|
|
|
|
|
async def scrape_tld_prices():
    """Scheduled task to scrape TLD prices from public sources.

    Opens a fresh DB session, delegates the scrape to the TLD aggregator,
    and logs a summary of the run.  Any failure is logged with a traceback
    but never propagated, so the scheduler keeps running.
    """
    # Local import — presumably deferred to avoid an import cycle at module
    # load time (NOTE(review): confirm against app.services.tld_scraper).
    from app.services.tld_scraper.aggregator import tld_aggregator

    logger.info("Starting scheduled TLD price scrape...")

    try:
        async with AsyncSessionLocal() as db:
            scrape = await tld_aggregator.run_scrape(db)

            logger.info(
                f"TLD scrape completed: {scrape.status}, "
                f"{scrape.tlds_scraped} TLDs, {scrape.prices_saved} prices saved"
            )

            # Partial failures are surfaced as warnings, not exceptions.
            if scrape.errors:
                logger.warning(f"Scrape errors: {scrape.errors}")

    except Exception as e:
        logger.exception(f"TLD price scrape failed: {e}")
|
|
|
|
|
|
async def check_all_domains():
    """Check availability of all monitored domains.

    One pass over every ``Domain`` row: queries live availability through
    ``domain_checker``, updates the row in place, records a ``DomainCheck``
    audit entry, and commits once at the end.  Domains that flipped from
    taken to available (with ``notify_on_available`` set) trigger email
    alerts.  Per-domain failures are logged and counted; they never abort
    the whole run.
    """
    logger.info("Starting daily domain check...")
    start_time = datetime.utcnow()

    async with AsyncSessionLocal() as db:
        # Get all domains
        result = await db.execute(select(Domain))
        domains = result.scalars().all()

        logger.info(f"Checking {len(domains)} domains...")

        checked = 0
        errors = 0
        newly_available = []

        for domain in domains:
            try:
                # Check domain availability
                check_result = await domain_checker.check_domain(domain.name)

                # Track if domain became available (taken -> available edge).
                was_taken = not domain.is_available
                is_now_available = check_result.is_available

                if was_taken and is_now_available and domain.notify_on_available:
                    newly_available.append(domain)

                # Update domain row with the latest check results.
                domain.status = check_result.status
                domain.is_available = check_result.is_available
                domain.registrar = check_result.registrar
                domain.expiration_date = check_result.expiration_date
                domain.last_checked = datetime.utcnow()

                # Create an audit record for this check.
                check = DomainCheck(
                    domain_id=domain.id,
                    status=check_result.status,
                    is_available=check_result.is_available,
                    response_data=str(check_result.to_dict()),
                    checked_at=datetime.utcnow(),
                )
                db.add(check)

                checked += 1

                # Small delay to avoid rate limiting upstream WHOIS/RDAP
                await asyncio.sleep(0.5)

            except Exception as e:
                logger.error(f"Error checking domain {domain.name}: {e}")
                errors += 1

        await db.commit()

        elapsed = (datetime.utcnow() - start_time).total_seconds()
        logger.info(
            f"Domain check complete. Checked: {checked}, Errors: {errors}, "
            f"Newly available: {len(newly_available)}, Time: {elapsed:.2f}s"
        )

        # Send notifications for newly available domains.
        # FIX: this must run inside the `async with` block — the session `db`
        # is passed to the alert sender, and using it after the context
        # manager exits would operate on a closed session.
        if newly_available:
            logger.info(f"Domains that became available: {[d.name for d in newly_available]}")
            await send_domain_availability_alerts(db, newly_available)
|
|
|
|
|
|
def setup_scheduler():
    """Configure and start the scheduler.

    Registers the three recurring jobs (domain check, TLD price scrape,
    price-change alerts) on the module-level scheduler.  Existing jobs
    with the same ids are replaced, so repeated calls are safe.
    """
    # (callable, trigger, job id, display name) for each recurring job.
    job_specs = (
        (
            check_all_domains,
            # Daily domain check at the configured hour/minute.
            CronTrigger(hour=settings.check_hour, minute=settings.check_minute),
            "daily_domain_check",
            "Daily Domain Availability Check",
        ),
        (
            scrape_tld_prices,
            # Daily TLD price scrape at 03:00 UTC.
            CronTrigger(hour=3, minute=0),
            "daily_tld_scrape",
            "Daily TLD Price Scrape",
        ),
        (
            check_price_changes,
            # Price change check at 04:00 UTC (after the scrape completes).
            CronTrigger(hour=4, minute=0),
            "daily_price_check",
            "Daily Price Change Check",
        ),
    )

    for func, trigger, job_id, display_name in job_specs:
        scheduler.add_job(
            func,
            trigger,
            id=job_id,
            name=display_name,
            replace_existing=True,
        )

    logger.info(
        f"Scheduler configured:"
        f"\n - Domain check at {settings.check_hour:02d}:{settings.check_minute:02d}"
        f"\n - TLD price scrape at 03:00 UTC"
        f"\n - Price change alerts at 04:00 UTC"
    )
|
|
|
|
|
|
def start_scheduler():
    """Start the scheduler if not already running."""
    # Guard: calling start() twice would raise, so bail out early.
    if scheduler.running:
        return
    setup_scheduler()
    scheduler.start()
    logger.info("Scheduler started")
|
|
|
|
|
|
def stop_scheduler():
    """Stop the scheduler."""
    # No-op when the scheduler was never started.
    if not scheduler.running:
        return
    scheduler.shutdown()
    logger.info("Scheduler stopped")
|
|
|
|
|
|
async def run_manual_check():
    """Run domain check manually (for testing or on-demand)."""
    # Thin wrapper so callers (API/CLI) can trigger the scheduled job directly.
    return await check_all_domains()
|
|
|
|
|
|
async def run_manual_tld_scrape():
    """Run TLD price scrape manually (for testing or on-demand)."""
    # Thin wrapper so callers (API/CLI) can trigger the scheduled job directly.
    return await scrape_tld_prices()
|
|
|
|
|
|
async def send_domain_availability_alerts(db, domains: list[Domain]):
    """Send email alerts for newly available domains.

    For each domain, looks up its owner and emails them via the email
    service.  Skips everything when email is not configured; per-domain
    failures are logged and do not stop the remaining alerts.
    """
    if not email_service.is_enabled:
        logger.info("Email service not configured, skipping domain alerts")
        return

    alerts_sent = 0

    for domain in domains:
        try:
            # Resolve the owning user for this domain.
            owner_query = select(User).where(User.id == domain.user_id)
            owner = (await db.execute(owner_query)).scalar_one_or_none()

            # Nothing to do without a deliverable address.
            if owner is None or not owner.email:
                continue

            delivered = await email_service.send_domain_available_alert(
                to_email=owner.email,
                domain=domain.name,
                user_name=owner.name,
            )
            if delivered:
                alerts_sent += 1

        except Exception as e:
            logger.error(f"Failed to send alert for {domain.name}: {e}")

    logger.info(f"Sent {alerts_sent} domain availability alerts")
|
|
|
|
|
|
async def check_price_changes():
    """Check for TLD price changes and send alerts.

    Asks the price tracker for significant changes over the last 24 hours
    and, when any are found, has it dispatch alert emails.  Failures are
    logged with a traceback and swallowed so the scheduler stays alive.
    """
    logger.info("Checking for TLD price changes...")

    try:
        async with AsyncSessionLocal() as db:
            # Detect changes in last 24 hours
            changes = await price_tracker.detect_price_changes(db, hours=24)

            if not changes:
                logger.info("No significant price changes detected")
                return

            logger.info(f"Found {len(changes)} significant price changes")

            # Send alerts (if email is configured)
            alerts_sent = await price_tracker.send_price_alerts(db, changes)
            logger.info(f"Sent {alerts_sent} price change alerts")

    except Exception as e:
        logger.exception(f"Price change check failed: {e}")