perf: phase 0 scheduler split, market feed paging, health cache, price tracker
@@ -943,52 +943,101 @@ async def get_market_feed(
    POUNCE EXCLUSIVE domains are highlighted and appear first.
    """
    items: List[MarketFeedItem] = []
    pounce_count = 0
    auction_count = 0

    # ═══════════════════════════════════════════════════════════════
    # 1. POUNCE DIRECT LISTINGS (Our USP!)
    # ═══════════════════════════════════════════════════════════════
    # NOTE: This endpoint is called frequently by the Market UI.
    # Avoid loading *all* auctions/listings into Python. Instead, we:
    # - Apply filters + ordering in SQL where possible
    # - Over-fetch a bounded window for combined feeds ("all") and score-sorting
    now = datetime.utcnow()
    tld_clean = tld.lower().lstrip(".") if tld else None

    requested = offset + limit
    fetch_window = min(max(requested * 3, 200), 2000)  # bounded overfetch for merge/sort
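    # Example (illustrative): offset=180, limit=20 gives requested = 200, so
    # fetch_window = min(max(200 * 3, 200), 2000) = 600 rows per source,
    # enough to merge both sources and still fill page 10.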

    built: list[dict] = []  # {"item": MarketFeedItem, "newest_ts": datetime}

    # -----------------------------
    # Build base filters (SQL-side)
    # -----------------------------
    listing_filters = [DomainListing.status == ListingStatus.ACTIVE.value]
    if keyword:
        listing_filters.append(DomainListing.domain.ilike(f"%{keyword}%"))
    if verified_only:
        listing_filters.append(DomainListing.verification_status == VerificationStatus.VERIFIED.value)
    if min_price is not None:
        listing_filters.append(DomainListing.asking_price >= min_price)
    if max_price is not None:
        listing_filters.append(DomainListing.asking_price <= max_price)
    if tld_clean:
        listing_filters.append(DomainListing.domain.ilike(f"%.{tld_clean}"))

    auction_filters = [
        DomainAuction.is_active == True,
        DomainAuction.end_time > now,
    ]
    if keyword:
        auction_filters.append(DomainAuction.domain.ilike(f"%{keyword}%"))
    if tld_clean:
        auction_filters.append(DomainAuction.tld == tld_clean)
    if min_price is not None:
        auction_filters.append(DomainAuction.current_bid >= min_price)
    if max_price is not None:
        auction_filters.append(DomainAuction.current_bid <= max_price)
    if ending_within:
        cutoff = now + timedelta(hours=ending_within)
        auction_filters.append(DomainAuction.end_time <= cutoff)

    # -----------------------------
    # Counts (used for UI stats)
    # -----------------------------
    pounce_total = 0
    auction_total = 0
    if source in ["all", "pounce"]:
        listing_query = select(DomainListing).where(
            DomainListing.status == ListingStatus.ACTIVE.value
        )

        if keyword:
            listing_query = listing_query.where(
                DomainListing.domain.ilike(f"%{keyword}%")
            )

        if verified_only:
            listing_query = listing_query.where(
                DomainListing.verification_status == VerificationStatus.VERIFIED.value
            )

        if min_price is not None:
            listing_query = listing_query.where(DomainListing.asking_price >= min_price)
        if max_price is not None:
            listing_query = listing_query.where(DomainListing.asking_price <= max_price)

        result = await db.execute(listing_query)
        listings = result.scalars().all()

        pounce_total = (await db.execute(select(func.count(DomainListing.id)).where(and_(*listing_filters)))).scalar() or 0
    if source in ["all", "external"]:
        auction_total = (await db.execute(select(func.count(DomainAuction.id)).where(and_(*auction_filters)))).scalar() or 0

    # -----------------------------
    # Fetch + build items (bounded)
    # -----------------------------
    # For "all": fetch a bounded window from each source and then merge/sort in Python.
    # For single-source: fetch offset/limit directly when sort can be pushed to SQL.
    listing_offset = 0
    listing_limit = fetch_window
    auction_offset = 0
    auction_limit = fetch_window

    if source == "pounce":
        listing_offset = offset
        listing_limit = limit
    if source == "external" and sort_by != "score":
        auction_offset = offset
        auction_limit = limit

    # Pounce Direct listings
    if source in ["all", "pounce"]:
        listing_query = select(DomainListing).where(and_(*listing_filters))

        # SQL ordering for listings (best-effort)
        if sort_by == "price_asc":
            listing_query = listing_query.order_by(func.coalesce(DomainListing.asking_price, 0).asc())
        elif sort_by == "price_desc":
            listing_query = listing_query.order_by(func.coalesce(DomainListing.asking_price, 0).desc())
        elif sort_by == "newest":
            listing_query = listing_query.order_by(DomainListing.updated_at.desc())
        else:
            # score/time: prefer higher score first for listings
            listing_query = listing_query.order_by(DomainListing.pounce_score.desc(), DomainListing.updated_at.desc())

        listing_query = listing_query.offset(listing_offset).limit(listing_limit)
        listings = (await db.execute(listing_query)).scalars().all()

        for listing in listings:
            domain_tld = listing.domain.rsplit('.', 1)[1] if '.' in listing.domain else ""

            # Apply TLD filter
            if tld and domain_tld.lower() != tld.lower().lstrip('.'):
                continue

            pounce_score = listing.pounce_score or _calculate_pounce_score_v2(
                listing.domain, domain_tld, is_pounce=True
            )

            # Apply score filter
            domain_tld = listing.domain.rsplit(".", 1)[1] if "." in listing.domain else ""
            pounce_score = listing.pounce_score or _calculate_pounce_score_v2(listing.domain, domain_tld, is_pounce=True)
            if pounce_score < min_score:
                continue

            items.append(MarketFeedItem(

            item = MarketFeedItem(
                id=f"pounce-{listing.id}",
                domain=listing.domain,
                tld=domain_tld,
@@ -1004,61 +1053,45 @@ async def get_market_feed(
                url=f"/buy/{listing.slug}",
                is_external=False,
                pounce_score=pounce_score,
            ))
            pounce_count += 1

    # ═══════════════════════════════════════════════════════════════
    # 2. EXTERNAL AUCTIONS (Scraped from platforms)
    # ═══════════════════════════════════════════════════════════════
            )
            built.append({"item": item, "newest_ts": listing.updated_at or listing.created_at or datetime.min})

    # External auctions
    if source in ["all", "external"]:
        now = datetime.utcnow()
        auction_query = select(DomainAuction).where(
            and_(
                DomainAuction.is_active == True,
                DomainAuction.end_time > now  # ← CRITICAL: only auctions that are still running!
            )
        )

        if keyword:
            auction_query = auction_query.where(
                DomainAuction.domain.ilike(f"%{keyword}%")
            )

        if tld:
            auction_query = auction_query.where(
                DomainAuction.tld == tld.lower().lstrip('.')
            )

        if min_price is not None:
            auction_query = auction_query.where(DomainAuction.current_bid >= min_price)
        if max_price is not None:
            auction_query = auction_query.where(DomainAuction.current_bid <= max_price)

        if ending_within:
            cutoff = datetime.utcnow() + timedelta(hours=ending_within)
            auction_query = auction_query.where(DomainAuction.end_time <= cutoff)

        result = await db.execute(auction_query)
        auctions = result.scalars().all()

        auction_query = select(DomainAuction).where(and_(*auction_filters))

        # SQL ordering for auctions when possible
        if sort_by == "time":
            auction_query = auction_query.order_by(DomainAuction.end_time.asc())
        elif sort_by == "price_asc":
            auction_query = auction_query.order_by(DomainAuction.current_bid.asc())
        elif sort_by == "price_desc":
            auction_query = auction_query.order_by(DomainAuction.current_bid.desc())
        elif sort_by == "newest":
            auction_query = auction_query.order_by(DomainAuction.updated_at.desc())
        else:
            # score: we will compute score in Python (Phase 1 introduces persisted score)
            auction_query = auction_query.order_by(DomainAuction.updated_at.desc())

        auction_query = auction_query.offset(auction_offset).limit(auction_limit)
        auctions = (await db.execute(auction_query)).scalars().all()

        for auction in auctions:
            # Apply vanity filter for non-authenticated users
            # Vanity filter for anonymous users
            if current_user is None and not _is_premium_domain(auction.domain):
                continue

            pounce_score = _calculate_pounce_score_v2(
                auction.domain,
                auction.tld,
                num_bids=auction.num_bids,
                age_years=auction.age_years or 0,
                is_pounce=False
                is_pounce=False,
            )

            # Apply score filter
            if pounce_score < min_score:
                continue

            items.append(MarketFeedItem(

            item = MarketFeedItem(
                id=f"auction-{auction.id}",
                domain=auction.domain,
                tld=auction.tld,
@@ -1075,47 +1108,46 @@ async def get_market_feed(
                url=_get_affiliate_url(auction.platform, auction.domain, auction.auction_url),
                is_external=True,
                pounce_score=pounce_score,
            ))
            auction_count += 1

    # ═══════════════════════════════════════════════════════════════
    # 3. SORT (Pounce Direct always appears first within same score)
    # ═══════════════════════════════════════════════════════════════
            )
            built.append({"item": item, "newest_ts": auction.updated_at or auction.scraped_at or datetime.min})

    # -----------------------------
    # Merge sort (Python) + paginate
    # -----------------------------
    if sort_by == "score":
        items.sort(key=lambda x: (-x.pounce_score, -int(x.is_pounce), x.domain))
        built.sort(key=lambda x: (x["item"].pounce_score, int(x["item"].is_pounce), x["item"].domain), reverse=True)
    elif sort_by == "price_asc":
        items.sort(key=lambda x: (x.price, -int(x.is_pounce), x.domain))
        built.sort(key=lambda x: (x["item"].price, -int(x["item"].is_pounce), x["item"].domain))
    elif sort_by == "price_desc":
        items.sort(key=lambda x: (-x.price, -int(x.is_pounce), x.domain))
        built.sort(key=lambda x: (-x["item"].price, -int(x["item"].is_pounce), x["item"].domain))
    elif sort_by == "time":
        # Pounce Direct first (no time limit), then by end time
        def time_sort_key(x):
            if x.is_pounce:
                return (0, datetime.max)
            return (1, x.end_time or datetime.max)
        items.sort(key=time_sort_key)
        built.sort(
            key=lambda x: (0 if x["item"].is_pounce else 1, x["item"].end_time or datetime.max)
        )
    elif sort_by == "newest":
        items.sort(key=lambda x: (-int(x.is_pounce), x.domain))

    total = len(items)

    # Pagination
    items = items[offset:offset + limit]

    # Get unique sources
        built.sort(key=lambda x: (int(x["item"].is_pounce), x["newest_ts"]), reverse=True)

    total = pounce_total + auction_total if source == "all" else (pounce_total if source == "pounce" else auction_total)

    page_slice = built[offset:offset + limit]
    items = [x["item"] for x in page_slice]

    # Unique sources (after pagination)
    sources = list(set(item.source for item in items))

    # Last update time
    last_update_result = await db.execute(
        select(func.max(DomainAuction.updated_at))
    )
    last_updated = last_update_result.scalar() or datetime.utcnow()

    # Last update time (auctions)
    if source in ["all", "external"]:
        last_update_result = await db.execute(select(func.max(DomainAuction.updated_at)))
        last_updated = last_update_result.scalar() or now
    else:
        last_updated = now

    return MarketFeedResponse(
        items=items,
        total=total,
        pounce_direct_count=pounce_count,
        auction_count=auction_count,
        pounce_direct_count=pounce_total,
        auction_count=auction_total,
        sources=sources,
        last_updated=last_updated,
        filters_applied={
@@ -1128,5 +1160,5 @@ async def get_market_feed(
            "ending_within": ending_within,
            "verified_only": verified_only,
            "sort_by": sort_by,
        }
        },
    )
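
The pattern in this endpoint (bound each source, merge in memory, slice the page) is easiest to see in isolation. A minimal sketch, assuming two already-filtered, pre-sorted in-memory sources in place of the real SQL queries; `fetch_a`, `fetch_b`, and `key` are hypothetical stand-ins:

def paged_merge(fetch_a, fetch_b, offset: int, limit: int, key):
    # Bound the overfetch exactly like the endpoint above.
    window = min(max((offset + limit) * 3, 200), 2000)
    # Each source returns at most `window` rows, already filtered and sorted.
    merged = sorted(fetch_a(window) + fetch_b(window), key=key)
    return merged[offset:offset + limit]

# e.g. page 3 (offset=40, limit=20) of two price-sorted lists:
# page = paged_merge(lambda n: listings[:n], lambda n: auctions[:n],
#                    offset=40, limit=20, key=lambda d: d["price"])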

@@ -1,13 +1,14 @@
"""Domain management API (requires authentication)."""
import json
from datetime import datetime
from math import ceil

from fastapi import APIRouter, HTTPException, status, Query
from pydantic import BaseModel
from sqlalchemy import select, func
from sqlalchemy import select, func, and_

from app.api.deps import Database, CurrentUser
from app.models.domain import Domain, DomainCheck, DomainStatus
from app.models.domain import Domain, DomainCheck, DomainStatus, DomainHealthCache
from app.models.subscription import TIER_CONFIG, SubscriptionTier
from app.schemas.domain import DomainCreate, DomainResponse, DomainListResponse
from app.services.domain_checker import domain_checker
@@ -15,6 +16,38 @@ from app.services.domain_health import get_health_checker, HealthStatus

router = APIRouter()

def _safe_json_loads(value: str | None, default):
    if not value:
        return default
    try:
        return json.loads(value)
    except Exception:
        return default


def _health_cache_to_report(domain: Domain, cache: DomainHealthCache) -> dict:
    """Convert DomainHealthCache row into the same shape as DomainHealthReport.to_dict()."""
    return {
        "domain": domain.name,
        "status": cache.status or "unknown",
        "score": cache.score or 0,
        "signals": _safe_json_loads(cache.signals, []),
        "recommendations": [],  # not stored in cache (yet)
        "checked_at": cache.checked_at.isoformat() if cache.checked_at else datetime.utcnow().isoformat(),
        "dns": _safe_json_loads(
            cache.dns_data,
            {"has_ns": False, "has_a": False, "has_mx": False, "nameservers": [], "is_parked": False, "error": None},
        ),
        "http": _safe_json_loads(
            cache.http_data,
            {"is_reachable": False, "status_code": None, "is_parked": False, "parking_keywords": [], "content_length": 0, "error": None},
        ),
        "ssl": _safe_json_loads(
            cache.ssl_data,
            {"has_certificate": False, "is_valid": False, "expires_at": None, "days_until_expiry": None, "issuer": None, "error": None},
        ),
    }


@router.get("", response_model=DomainListResponse)
async def list_domains(
@@ -49,6 +82,40 @@ async def list_domains(
    )


@router.get("/health-cache")
async def get_domains_health_cache(
    current_user: CurrentUser,
    db: Database,
):
    """
    Get cached domain health reports for the current user (bulk).

    This avoids N requests from the frontend and returns the cached health
    data written by the scheduler job.
    """
    result = await db.execute(
        select(Domain, DomainHealthCache)
        .outerjoin(DomainHealthCache, DomainHealthCache.domain_id == Domain.id)
        .where(Domain.user_id == current_user.id)
    )
    rows = result.all()

    reports: dict[str, dict] = {}
    cached = 0
    for domain, cache in rows:
        if cache is None:
            continue
        reports[str(domain.id)] = _health_cache_to_report(domain, cache)
        cached += 1

    return {
        "reports": reports,
        "total_domains": len(rows),
        "cached_domains": cached,
        "timestamp": datetime.utcnow().isoformat(),
    }
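
The endpoint keys cached reports by domain id as strings, since JSON object keys are always strings. An illustrative payload, with invented values:

# Illustrative response of GET /domains/health-cache (all values invented):
# {
#   "reports": {
#     "42": {"domain": "example.com", "status": "healthy", "score": 92, ...}
#   },
#   "total_domains": 3,
#   "cached_domains": 1,
#   "timestamp": "2025-01-01T06:00:00"
# }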


@router.post("", response_model=DomainResponse, status_code=status.HTTP_201_CREATED)
async def add_domain(
    domain_data: DomainCreate,
@@ -372,6 +439,7 @@ async def get_domain_health(
    domain_id: int,
    current_user: CurrentUser,
    db: Database,
    refresh: bool = Query(False, description="Force a live health check instead of using cache"),
):
    """
    Get comprehensive health report for a domain.
@@ -400,11 +468,44 @@ async def get_domain_health(
            detail="Domain not found",
        )

    # Run health check
    # Prefer cached report for UI performance
    if not refresh:
        cache_result = await db.execute(
            select(DomainHealthCache).where(DomainHealthCache.domain_id == domain.id)
        )
        cache = cache_result.scalar_one_or_none()
        if cache is not None:
            return _health_cache_to_report(domain, cache)

    # Live health check (slow) + update cache
    health_checker = get_health_checker()
    report = await health_checker.check_domain(domain.name)

    return report.to_dict()
    report_dict = report.to_dict()

    signals_json = json.dumps(report_dict.get("signals") or [])
    dns_json = json.dumps(report_dict.get("dns") or {})
    http_json = json.dumps(report_dict.get("http") or {})
    ssl_json = json.dumps(report_dict.get("ssl") or {})

    cache_result = await db.execute(
        select(DomainHealthCache).where(DomainHealthCache.domain_id == domain.id)
    )
    cache = cache_result.scalar_one_or_none()
    if cache is None:
        cache = DomainHealthCache(domain_id=domain.id)
        db.add(cache)

    cache.status = report_dict.get("status") or "unknown"
    cache.score = int(report_dict.get("score") or 0)
    cache.signals = signals_json
    cache.dns_data = dns_json
    cache.http_data = http_json
    cache.ssl_data = ssl_json
    cache.checked_at = datetime.utcnow()

    await db.commit()

    return report_dict
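
To exercise the new `refresh` flag from a script, something like the following works; a sketch assuming a local server and a valid token (URL, port, and token are placeholders, not part of the change):

import httpx

# Force a live re-check of domain 42 (URL and token are placeholders).
resp = httpx.get(
    "http://localhost:8000/domains/42/health",
    params={"refresh": "true"},
    headers={"Authorization": "Bearer <token>"},
    timeout=30.0,  # the live path runs DNS/HTTP/SSL checks, so allow extra time
)
print(resp.json()["status"], resp.json()["score"])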


@router.post("/health-check")

@@ -32,6 +32,7 @@ class Settings(BaseSettings):
    check_hour: int = 6
    check_minute: int = 0
    scheduler_check_interval_hours: int = 24
    enable_scheduler: bool = False  # Run APScheduler jobs in this process (recommend: separate scheduler process)
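    # Illustrative note: pydantic BaseSettings reads this from the ENABLE_SCHEDULER
    # environment variable (matching is case-insensitive by default), so the
    # scheduler container sets it to "true" while API workers keep the default
    # false; see the docker-compose change below.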

    # =================================
    # External API Credentials

@@ -46,14 +46,18 @@ async def lifespan(app: FastAPI):
    await init_db()
    logger.info("Database initialized")

    # Start scheduler
    start_scheduler()
    logger.info("Scheduler started")
    # Start scheduler (optional - recommended: run in separate process/container)
    if settings.enable_scheduler:
        start_scheduler()
        logger.info("Scheduler started")
    else:
        logger.info("Scheduler disabled (ENABLE_SCHEDULER=false)")

    yield

    # Shutdown
    stop_scheduler()
    if settings.enable_scheduler:
        stop_scheduler()
    logger.info("Application shutdown complete")

@@ -368,15 +368,22 @@ async def run_health_checks():
            })
            logger.info(f"⚠️ Status change: {domain.name} {old_status} → {new_status}")

        # Serialize data to JSON strings
        # Serialize data to JSON strings (cache is used by the UI)
        import json
        signals_json = json.dumps(report.signals) if report.signals else None
        report_dict = report.to_dict()
        signals_json = json.dumps(report_dict.get("signals") or [])
        dns_json = json.dumps(report_dict.get("dns") or {})
        http_json = json.dumps(report_dict.get("http") or {})
        ssl_json = json.dumps(report_dict.get("ssl") or {})

        # Update or create cache
        if existing_cache:
            existing_cache.status = new_status
            existing_cache.score = report.score
            existing_cache.signals = signals_json
            existing_cache.dns_data = dns_json
            existing_cache.http_data = http_json
            existing_cache.ssl_data = ssl_json
            existing_cache.checked_at = datetime.utcnow()
        else:
            # Create new cache entry
@@ -385,6 +392,9 @@ async def run_health_checks():
                status=new_status,
                score=report.score,
                signals=signals_json,
                dns_data=dns_json,
                http_data=http_json,
                ssl_data=ssl_json,
                checked_at=datetime.utcnow(),
            )
            db.add(new_cache)

@@ -68,59 +68,73 @@ class PriceTracker:
        Returns:
            List of significant price changes
        """
        changes = []
        changes: list[PriceChange] = []
        now = datetime.utcnow()
        cutoff = now - timedelta(hours=hours)

        # Get unique TLD/registrar combinations
        tld_registrars = await db.execute(
            select(TLDPrice.tld, TLDPrice.registrar)
            .distinct()
        )

        for tld, registrar in tld_registrars:
            # Get the two most recent prices for this TLD/registrar
            result = await db.execute(
                select(TLDPrice)
                .where(
                    and_(
                        TLDPrice.tld == tld,
                        TLDPrice.registrar == registrar,

        # PERF: Avoid N+1 queries (distinct(tld, registrar) + per-pair LIMIT 2).
        # We fetch the latest 2 rows per (tld, registrar) using a window function.
        ranked = (
            select(
                TLDPrice.tld.label("tld"),
                TLDPrice.registrar.label("registrar"),
                TLDPrice.registration_price.label("price"),
                TLDPrice.recorded_at.label("recorded_at"),
                func.row_number()
                .over(
                    partition_by=(TLDPrice.tld, TLDPrice.registrar),
                    order_by=TLDPrice.recorded_at.desc(),
                )
                .label("rn"),
            )
        ).subquery()

        rows = (
            await db.execute(
                select(
                    ranked.c.tld,
                    ranked.c.registrar,
                    ranked.c.price,
                    ranked.c.recorded_at,
                    ranked.c.rn,
                )
                .where(ranked.c.rn <= 2)
                .order_by(ranked.c.tld, ranked.c.registrar, ranked.c.rn)
            )
        ).all()

        from itertools import groupby

        for (tld, registrar), grp in groupby(rows, key=lambda r: (r.tld, r.registrar)):
            pair = list(grp)
            if len(pair) < 2:
                continue

            newest = pair[0] if pair[0].rn == 1 else pair[1]
            previous = pair[1] if pair[0].rn == 1 else pair[0]

            # Only consider if the newest price is within the requested window
            if newest.recorded_at is None or newest.recorded_at < cutoff:
                continue

            if not previous.price or previous.price == 0:
                continue

            change_amount = float(newest.price) - float(previous.price)
            change_percent = (change_amount / float(previous.price)) * 100

            if abs(change_percent) >= self.SIGNIFICANT_CHANGE_THRESHOLD:
                changes.append(
                    PriceChange(
                        tld=tld,
                        registrar=registrar,
                        old_price=float(previous.price),
                        new_price=float(newest.price),
                        change_amount=change_amount,
                        change_percent=change_percent,
                        detected_at=newest.recorded_at,
                    )
                )
                .order_by(TLDPrice.recorded_at.desc())
                .limit(2)
            )
            prices = result.scalars().all()

            if len(prices) < 2:
                continue

            new_price = prices[0]
            old_price = prices[1]

            # Check if change is within our time window
            if new_price.recorded_at < cutoff:
                continue

            # Calculate change
            if old_price.registration_price == 0:
                continue

            change_amount = new_price.registration_price - old_price.registration_price
            change_percent = (change_amount / old_price.registration_price) * 100

            # Only track significant changes
            if abs(change_percent) >= self.SIGNIFICANT_CHANGE_THRESHOLD:
                changes.append(PriceChange(
                    tld=tld,
                    registrar=registrar,
                    old_price=old_price.registration_price,
                    new_price=new_price.registration_price,
                    change_amount=change_amount,
                    change_percent=change_percent,
                    detected_at=new_price.recorded_at,
                ))

        # Sort by absolute change percentage (most significant first)
        changes.sort(key=lambda x: abs(x.change_percent), reverse=True)
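
For readers less familiar with window functions: the `ranked` subquery plus the `rn <= 2` filter compiles to roughly the SQL below. This is an illustration only; the actual table name ("tld_prices" is assumed here) and the exact output depend on the model and SQL dialect.

        # Roughly the SQL emitted by the ranked/rows queries above
        # (illustrative; assumes the model maps to a "tld_prices" table):
        #
        #   SELECT tld, registrar, price, recorded_at, rn
        #   FROM (
        #       SELECT tld, registrar,
        #              registration_price AS price,
        #              recorded_at,
        #              ROW_NUMBER() OVER (
        #                  PARTITION BY tld, registrar
        #                  ORDER BY recorded_at DESC
        #              ) AS rn
        #       FROM tld_prices
        #   ) AS ranked
        #   WHERE rn <= 2
        #   ORDER BY tld, registrar, rn
        #
        # One scan instead of 1 + N queries: each (tld, registrar) pair
        # contributes its two newest rows, which groupby() pairs up in Python.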

backend/run_scheduler.py (new file, 63 lines)
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
"""
Standalone scheduler runner for Pounce.

Runs APScheduler jobs without starting the FastAPI server.
Recommended for production to avoid duplicate jobs when running multiple API workers.
"""

import asyncio
import logging
import signal

from dotenv import load_dotenv

# Load .env early (same as app/main.py)
load_dotenv()

from app.config import get_settings
from app.database import init_db
from app.scheduler import start_scheduler, stop_scheduler

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger("pounce.scheduler")


async def main() -> None:
    settings = get_settings()
    logger.info("Starting scheduler runner for %s...", settings.app_name)

    # Ensure DB schema exists (create_all for new installs)
    await init_db()
    logger.info("Database initialized")

    start_scheduler()
    logger.info("Scheduler started")

    stop_event = asyncio.Event()

    def _request_shutdown(sig: signal.Signals) -> None:
        logger.info("Received %s, shutting down scheduler...", sig.name)
        stop_event.set()

    loop = asyncio.get_running_loop()
    for sig in (signal.SIGTERM, signal.SIGINT):
        try:
            loop.add_signal_handler(sig, lambda s=sig: _request_shutdown(s))
        except NotImplementedError:
            # Fallback (Windows / limited environments)
            signal.signal(sig, lambda *_: _request_shutdown(sig))

    await stop_event.wait()

    stop_scheduler()
    logger.info("Scheduler stopped. Bye.")


if __name__ == "__main__":
    asyncio.run(main())

@@ -31,6 +31,7 @@ services:
      DATABASE_URL: postgresql+asyncpg://pounce:${DB_PASSWORD:-changeme}@db:5432/pounce
      SECRET_KEY: ${SECRET_KEY:-change-this-in-production}
      CORS_ORIGINS: ${CORS_ORIGINS:-http://localhost:3000}
      ENABLE_SCHEDULER: "false"
    depends_on:
      db:
        condition: service_healthy
@@ -40,6 +41,22 @@ services:
      timeout: 10s
      retries: 3

  # Scheduler (APScheduler) - runs background jobs in a separate process
  scheduler:
    build:
      context: ./backend
      dockerfile: Dockerfile
    container_name: pounce-scheduler
    restart: unless-stopped
    environment:
      DATABASE_URL: postgresql+asyncpg://pounce:${DB_PASSWORD:-changeme}@db:5432/pounce
      SECRET_KEY: ${SECRET_KEY:-change-this-in-production}
      ENABLE_SCHEDULER: "true"
    depends_on:
      db:
        condition: service_healthy
    command: ["python", "run_scheduler.py"]

  # Next.js Frontend
  frontend:
    build:
@@ -410,7 +410,8 @@ export default function WatchlistPage() {

    setLoadingHealth(prev => ({ ...prev, [domainId]: true }))
    try {
      const report = await api.getDomainHealth(domainId)
      // Force a live refresh when user explicitly triggers a check
      const report = await api.getDomainHealth(domainId, { refresh: true })
      setHealthReports(prev => ({ ...prev, [domainId]: report }))
      setSelectedHealthDomainId(domainId)
    } catch (err: any) {
@@ -426,17 +427,15 @@ export default function WatchlistPage() {
  const loadHealthData = async () => {
    if (!domains || domains.length === 0) return

    // Load health for registered domains only (not available ones)
    const registeredDomains = domains.filter(d => !d.is_available)

    for (const domain of registeredDomains.slice(0, 10)) { // Limit to first 10 to avoid overload
      try {
        const report = await api.getDomainHealth(domain.id)
        setHealthReports(prev => ({ ...prev, [domain.id]: report }))
      } catch {
        // Silently fail - health data is optional
    // Load cached health for all domains in one request (fast path)
    try {
      const data = await api.getDomainsHealthCache()
      if (data?.reports) {
        // API returns string keys; JS handles number indexing transparently.
        setHealthReports(prev => ({ ...(prev as any), ...(data.reports as any) }))
      }
      await new Promise(r => setTimeout(r, 200)) // Small delay
    } catch {
      // Silently fail - health data is optional
    }
  }


@@ -386,8 +386,9 @@ class ApiClient {
  }

  // Domain Health Check - 4-layer analysis (DNS, HTTP, SSL, WHOIS)
  async getDomainHealth(domainId: number) {
    return this.request<DomainHealthReport>(`/domains/${domainId}/health`)
  async getDomainHealth(domainId: number, options?: { refresh?: boolean }) {
    const refreshParam = options?.refresh ? '?refresh=true' : ''
    return this.request<DomainHealthReport>(`/domains/${domainId}/health${refreshParam}`)
  }

  // Quick health check for any domain (premium)
@@ -397,6 +398,16 @@ class ApiClient {
    })
  }

  // Bulk cached health reports for watchlist UI (fast)
  async getDomainsHealthCache() {
    return this.request<{
      reports: Record<string, DomainHealthReport>
      total_domains: number
      cached_domains: number
      timestamp: string
    }>('/domains/health-cache')
  }
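
  // Example usage (illustrative, mirroring the watchlist fast path above):
  //   const { reports, cached_domains } = await api.getDomainsHealthCache()
  //   setHealthReports(prev => ({ ...prev, ...reports }))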

  // TLD Pricing
  async getTldOverview(
    limit = 25,