perf: phase 0 scheduler split, market feed paging, health cache, price tracker
@@ -943,52 +943,101 @@ async def get_market_feed(
 
     POUNCE EXCLUSIVE domains are highlighted and appear first.
     """
-    items: List[MarketFeedItem] = []
-    pounce_count = 0
-    auction_count = 0
+    # NOTE: This endpoint is called frequently by the Market UI.
+    # Avoid loading *all* auctions/listings into Python. Instead, we:
+    # - Apply filters + ordering in SQL where possible
+    # - Over-fetch a bounded window for combined feeds ("all") and score-sorting
+    now = datetime.utcnow()
+    tld_clean = tld.lower().lstrip(".") if tld else None
 
-    # ═══════════════════════════════════════════════════════════════
-    # 1. POUNCE DIRECT LISTINGS (Our USP!)
-    # ═══════════════════════════════════════════════════════════════
-    if source in ["all", "pounce"]:
-        listing_query = select(DomainListing).where(
-            DomainListing.status == ListingStatus.ACTIVE.value
-        )
+    requested = offset + limit
+    fetch_window = min(max(requested * 3, 200), 2000)  # bounded overfetch for merge/sort
 
+    built: list[dict] = []  # {"item": MarketFeedItem, "newest_ts": datetime}
+
+    # -----------------------------
+    # Build base filters (SQL-side)
+    # -----------------------------
+    listing_filters = [DomainListing.status == ListingStatus.ACTIVE.value]
     if keyword:
-        listing_query = listing_query.where(
-            DomainListing.domain.ilike(f"%{keyword}%")
-        )
+        listing_filters.append(DomainListing.domain.ilike(f"%{keyword}%"))
 
     if verified_only:
-        listing_query = listing_query.where(
-            DomainListing.verification_status == VerificationStatus.VERIFIED.value
-        )
+        listing_filters.append(DomainListing.verification_status == VerificationStatus.VERIFIED.value)
 
     if min_price is not None:
-        listing_query = listing_query.where(DomainListing.asking_price >= min_price)
+        listing_filters.append(DomainListing.asking_price >= min_price)
     if max_price is not None:
-        listing_query = listing_query.where(DomainListing.asking_price <= max_price)
+        listing_filters.append(DomainListing.asking_price <= max_price)
+    if tld_clean:
+        listing_filters.append(DomainListing.domain.ilike(f"%.{tld_clean}"))
 
-    result = await db.execute(listing_query)
-    listings = result.scalars().all()
+    auction_filters = [
+        DomainAuction.is_active == True,
+        DomainAuction.end_time > now,
+    ]
+    if keyword:
+        auction_filters.append(DomainAuction.domain.ilike(f"%{keyword}%"))
+    if tld_clean:
+        auction_filters.append(DomainAuction.tld == tld_clean)
+    if min_price is not None:
+        auction_filters.append(DomainAuction.current_bid >= min_price)
+    if max_price is not None:
+        auction_filters.append(DomainAuction.current_bid <= max_price)
+    if ending_within:
+        cutoff = now + timedelta(hours=ending_within)
+        auction_filters.append(DomainAuction.end_time <= cutoff)
+
+    # -----------------------------
+    # Counts (used for UI stats)
+    # -----------------------------
+    pounce_total = 0
+    auction_total = 0
+    if source in ["all", "pounce"]:
+        pounce_total = (await db.execute(select(func.count(DomainListing.id)).where(and_(*listing_filters)))).scalar() or 0
+    if source in ["all", "external"]:
+        auction_total = (await db.execute(select(func.count(DomainAuction.id)).where(and_(*auction_filters)))).scalar() or 0
+
+    # -----------------------------
+    # Fetch + build items (bounded)
+    # -----------------------------
+    # For "all": fetch a bounded window from each source and then merge/sort in Python.
+    # For single-source: fetch offset/limit directly when sort can be pushed to SQL.
+    listing_offset = 0
+    listing_limit = fetch_window
+    auction_offset = 0
+    auction_limit = fetch_window
+
+    if source == "pounce":
+        listing_offset = offset
+        listing_limit = limit
+    if source == "external" and sort_by != "score":
+        auction_offset = offset
+        auction_limit = limit
+
+    # Pounce Direct listings
+    if source in ["all", "pounce"]:
+        listing_query = select(DomainListing).where(and_(*listing_filters))
+
+        # SQL ordering for listings (best-effort)
+        if sort_by == "price_asc":
+            listing_query = listing_query.order_by(func.coalesce(DomainListing.asking_price, 0).asc())
+        elif sort_by == "price_desc":
+            listing_query = listing_query.order_by(func.coalesce(DomainListing.asking_price, 0).desc())
+        elif sort_by == "newest":
+            listing_query = listing_query.order_by(DomainListing.updated_at.desc())
+        else:
+            # score/time: prefer higher score first for listings
+            listing_query = listing_query.order_by(DomainListing.pounce_score.desc(), DomainListing.updated_at.desc())
+
+        listing_query = listing_query.offset(listing_offset).limit(listing_limit)
+        listings = (await db.execute(listing_query)).scalars().all()
 
         for listing in listings:
-            domain_tld = listing.domain.rsplit('.', 1)[1] if '.' in listing.domain else ""
-
-            # Apply TLD filter
-            if tld and domain_tld.lower() != tld.lower().lstrip('.'):
-                continue
-
-            pounce_score = listing.pounce_score or _calculate_pounce_score_v2(
-                listing.domain, domain_tld, is_pounce=True
-            )
-
-            # Apply score filter
+            domain_tld = listing.domain.rsplit(".", 1)[1] if "." in listing.domain else ""
+            pounce_score = listing.pounce_score or _calculate_pounce_score_v2(listing.domain, domain_tld, is_pounce=True)
+
             if pounce_score < min_score:
                 continue
 
-            items.append(MarketFeedItem(
+            item = MarketFeedItem(
                 id=f"pounce-{listing.id}",
                 domain=listing.domain,
                 tld=domain_tld,
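The `fetch_window` bound above trades a little memory for correct merged pagination: the combined "all" feed needs at least `offset + limit` rows from each source before it can sort and slice in Python. A minimal sketch of that arithmetic, using the 3x multiplier and 200/2000 clamps from the diff (the helper name is hypothetical):

def bounded_fetch_window(offset: int, limit: int) -> int:
    """Sketch of the overfetch bound used by the market feed."""
    requested = offset + limit
    # Over-fetch 3x the requested depth, but never fewer than 200
    # or more than 2000 rows per source.
    return min(max(requested * 3, 200), 2000)

# Illustrative values:
#   first page  (offset=0,   limit=50) -> 200  (floor dominates)
#   deep page   (offset=400, limit=50) -> 1350
#   very deep   (offset=900, limit=50) -> 2000 (ceiling caps the per-source fetch)
print(bounded_fetch_window(0, 50), bounded_fetch_window(400, 50), bounded_fetch_window(900, 50))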
@@ -1004,45 +1053,31 @@ async def get_market_feed(
                 url=f"/buy/{listing.slug}",
                 is_external=False,
                 pounce_score=pounce_score,
-            ))
-            pounce_count += 1
+            )
+            built.append({"item": item, "newest_ts": listing.updated_at or listing.created_at or datetime.min})
 
-    # ═══════════════════════════════════════════════════════════════
-    # 2. EXTERNAL AUCTIONS (Scraped from platforms)
-    # ═══════════════════════════════════════════════════════════════
+    # External auctions
     if source in ["all", "external"]:
-        now = datetime.utcnow()
-        auction_query = select(DomainAuction).where(
-            and_(
-                DomainAuction.is_active == True,
-                DomainAuction.end_time > now  # ← CRITICAL: only auctions that are still running!
-            )
-        )
+        auction_query = select(DomainAuction).where(and_(*auction_filters))
 
-        if keyword:
-            auction_query = auction_query.where(
-                DomainAuction.domain.ilike(f"%{keyword}%")
-            )
+        # SQL ordering for auctions when possible
+        if sort_by == "time":
+            auction_query = auction_query.order_by(DomainAuction.end_time.asc())
+        elif sort_by == "price_asc":
+            auction_query = auction_query.order_by(DomainAuction.current_bid.asc())
+        elif sort_by == "price_desc":
+            auction_query = auction_query.order_by(DomainAuction.current_bid.desc())
+        elif sort_by == "newest":
+            auction_query = auction_query.order_by(DomainAuction.updated_at.desc())
+        else:
+            # score: we will compute score in Python (Phase 1 introduces persisted score)
+            auction_query = auction_query.order_by(DomainAuction.updated_at.desc())
 
-        if tld:
-            auction_query = auction_query.where(
-                DomainAuction.tld == tld.lower().lstrip('.')
-            )
-
-        if min_price is not None:
-            auction_query = auction_query.where(DomainAuction.current_bid >= min_price)
-        if max_price is not None:
-            auction_query = auction_query.where(DomainAuction.current_bid <= max_price)
-
-        if ending_within:
-            cutoff = datetime.utcnow() + timedelta(hours=ending_within)
-            auction_query = auction_query.where(DomainAuction.end_time <= cutoff)
-
-        result = await db.execute(auction_query)
-        auctions = result.scalars().all()
+        auction_query = auction_query.offset(auction_offset).limit(auction_limit)
+        auctions = (await db.execute(auction_query)).scalars().all()
 
         for auction in auctions:
-            # Apply vanity filter for non-authenticated users
+            # Vanity filter for anonymous users
            if current_user is None and not _is_premium_domain(auction.domain):
                 continue
@@ -1051,14 +1086,12 @@ async def get_market_feed(
                 auction.tld,
                 num_bids=auction.num_bids,
                 age_years=auction.age_years or 0,
-                is_pounce=False
+                is_pounce=False,
             )
 
-            # Apply score filter
             if pounce_score < min_score:
                 continue
 
-            items.append(MarketFeedItem(
+            item = MarketFeedItem(
                 id=f"auction-{auction.id}",
                 domain=auction.domain,
                 tld=auction.tld,
@@ -1075,47 +1108,46 @@ async def get_market_feed(
                 url=_get_affiliate_url(auction.platform, auction.domain, auction.auction_url),
                 is_external=True,
                 pounce_score=pounce_score,
-            ))
-            auction_count += 1
+            )
+            built.append({"item": item, "newest_ts": auction.updated_at or auction.scraped_at or datetime.min})
 
-    # ═══════════════════════════════════════════════════════════════
-    # 3. SORT (Pounce Direct always appears first within same score)
-    # ═══════════════════════════════════════════════════════════════
+    # -----------------------------
+    # Merge sort (Python) + paginate
+    # -----------------------------
     if sort_by == "score":
-        items.sort(key=lambda x: (-x.pounce_score, -int(x.is_pounce), x.domain))
+        built.sort(key=lambda x: (x["item"].pounce_score, int(x["item"].is_pounce), x["item"].domain), reverse=True)
     elif sort_by == "price_asc":
-        items.sort(key=lambda x: (x.price, -int(x.is_pounce), x.domain))
+        built.sort(key=lambda x: (x["item"].price, -int(x["item"].is_pounce), x["item"].domain))
     elif sort_by == "price_desc":
-        items.sort(key=lambda x: (-x.price, -int(x.is_pounce), x.domain))
+        built.sort(key=lambda x: (-x["item"].price, -int(x["item"].is_pounce), x["item"].domain))
     elif sort_by == "time":
         # Pounce Direct first (no time limit), then by end time
-        def time_sort_key(x):
-            if x.is_pounce:
-                return (0, datetime.max)
-            return (1, x.end_time or datetime.max)
-        items.sort(key=time_sort_key)
+        built.sort(
+            key=lambda x: (0 if x["item"].is_pounce else 1, x["item"].end_time or datetime.max)
+        )
     elif sort_by == "newest":
-        items.sort(key=lambda x: (-int(x.is_pounce), x.domain))
+        built.sort(key=lambda x: (int(x["item"].is_pounce), x["newest_ts"]), reverse=True)
 
-    total = len(items)
+    total = pounce_total + auction_total if source == "all" else (pounce_total if source == "pounce" else auction_total)
 
-    # Pagination
-    items = items[offset:offset + limit]
+    page_slice = built[offset:offset + limit]
+    items = [x["item"] for x in page_slice]
 
-    # Get unique sources
+    # Unique sources (after pagination)
     sources = list(set(item.source for item in items))
 
-    # Last update time
-    last_update_result = await db.execute(
-        select(func.max(DomainAuction.updated_at))
-    )
-    last_updated = last_update_result.scalar() or datetime.utcnow()
+    # Last update time (auctions)
+    if source in ["all", "external"]:
+        last_update_result = await db.execute(select(func.max(DomainAuction.updated_at)))
+        last_updated = last_update_result.scalar() or now
+    else:
+        last_updated = now
 
     return MarketFeedResponse(
         items=items,
         total=total,
-        pounce_direct_count=pounce_count,
-        auction_count=auction_count,
+        pounce_direct_count=pounce_total,
+        auction_count=auction_total,
         sources=sources,
         last_updated=last_updated,
         filters_applied={
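Because the combined feed only holds a bounded window of rows, `total` now has to come from the SQL counts rather than `len(built)`, and the page slice is taken after the Python-side merge sort. A minimal sketch of the "score" ordering with made-up items (`SimpleNamespace` stands in for `MarketFeedItem`):

from types import SimpleNamespace

built = [
    {"item": SimpleNamespace(pounce_score=80, is_pounce=False, domain="beta.com")},
    {"item": SimpleNamespace(pounce_score=80, is_pounce=True, domain="alpha.com")},
    {"item": SimpleNamespace(pounce_score=95, is_pounce=False, domain="gamma.io")},
]

# Same key as in the diff: highest score first, Pounce Direct wins ties on score.
built.sort(key=lambda x: (x["item"].pounce_score, int(x["item"].is_pounce), x["item"].domain), reverse=True)
print([x["item"].domain for x in built])  # ['gamma.io', 'alpha.com', 'beta.com']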
@@ -1128,5 +1160,5 @@ async def get_market_feed(
             "ending_within": ending_within,
             "verified_only": verified_only,
             "sort_by": sort_by,
-        }
+        },
     )
@@ -1,13 +1,14 @@
 """Domain management API (requires authentication)."""
+import json
 from datetime import datetime
 from math import ceil
 
 from fastapi import APIRouter, HTTPException, status, Query
 from pydantic import BaseModel
-from sqlalchemy import select, func
+from sqlalchemy import select, func, and_
 
 from app.api.deps import Database, CurrentUser
-from app.models.domain import Domain, DomainCheck, DomainStatus
+from app.models.domain import Domain, DomainCheck, DomainStatus, DomainHealthCache
 from app.models.subscription import TIER_CONFIG, SubscriptionTier
 from app.schemas.domain import DomainCreate, DomainResponse, DomainListResponse
 from app.services.domain_checker import domain_checker
@@ -15,6 +16,38 @@ from app.services.domain_health import get_health_checker, HealthStatus
 
 router = APIRouter()
 
+
+def _safe_json_loads(value: str | None, default):
+    if not value:
+        return default
+    try:
+        return json.loads(value)
+    except Exception:
+        return default
+
+
+def _health_cache_to_report(domain: Domain, cache: DomainHealthCache) -> dict:
+    """Convert DomainHealthCache row into the same shape as DomainHealthReport.to_dict()."""
+    return {
+        "domain": domain.name,
+        "status": cache.status or "unknown",
+        "score": cache.score or 0,
+        "signals": _safe_json_loads(cache.signals, []),
+        "recommendations": [],  # not stored in cache (yet)
+        "checked_at": cache.checked_at.isoformat() if cache.checked_at else datetime.utcnow().isoformat(),
+        "dns": _safe_json_loads(
+            cache.dns_data,
+            {"has_ns": False, "has_a": False, "has_mx": False, "nameservers": [], "is_parked": False, "error": None},
+        ),
+        "http": _safe_json_loads(
+            cache.http_data,
+            {"is_reachable": False, "status_code": None, "is_parked": False, "parking_keywords": [], "content_length": 0, "error": None},
+        ),
+        "ssl": _safe_json_loads(
+            cache.ssl_data,
+            {"has_certificate": False, "is_valid": False, "expires_at": None, "days_until_expiry": None, "issuer": None, "error": None},
+        ),
+    }
+
+
 @router.get("", response_model=DomainListResponse)
 async def list_domains(
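The cache columns store JSON as text, so the conversion helper above falls back to a caller-supplied default for NULL or malformed values. A quick, standalone illustration of that contract (re-implemented here only to show the behavior):

import json

def safe_json_loads(value, default):
    """Return parsed JSON, or `default` when the value is empty or malformed."""
    if not value:
        return default
    try:
        return json.loads(value)
    except Exception:
        return default

print(safe_json_loads(None, []))                # []
print(safe_json_loads("not json", {}))          # {}
print(safe_json_loads('{"has_ns": true}', {}))  # {'has_ns': True}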
@@ -49,6 +82,40 @@ async def list_domains(
     )
 
 
+@router.get("/health-cache")
+async def get_domains_health_cache(
+    current_user: CurrentUser,
+    db: Database,
+):
+    """
+    Get cached domain health reports for the current user (bulk).
+
+    This avoids N requests from the frontend and returns the cached health
+    data written by the scheduler job.
+    """
+    result = await db.execute(
+        select(Domain, DomainHealthCache)
+        .outerjoin(DomainHealthCache, DomainHealthCache.domain_id == Domain.id)
+        .where(Domain.user_id == current_user.id)
+    )
+    rows = result.all()
+
+    reports: dict[str, dict] = {}
+    cached = 0
+    for domain, cache in rows:
+        if cache is None:
+            continue
+        reports[str(domain.id)] = _health_cache_to_report(domain, cache)
+        cached += 1
+
+    return {
+        "reports": reports,
+        "total_domains": len(rows),
+        "cached_domains": cached,
+        "timestamp": datetime.utcnow().isoformat(),
+    }
+
+
 @router.post("", response_model=DomainResponse, status_code=status.HTTP_201_CREATED)
 async def add_domain(
     domain_data: DomainCreate,
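The bulk endpoint keys reports by the stringified domain id and mirrors the shape produced by `_health_cache_to_report`. A hedged sketch of what a response might look like for a user with two watched domains, only one of which has a cache row (all values below are illustrative, not real output):

# Hypothetical response body from GET /domains/health-cache (values made up for illustration).
example_response = {
    "reports": {
        "17": {
            "domain": "example.com",
            "status": "healthy",
            "score": 92,
            "signals": [],
            "recommendations": [],
            "checked_at": "2025-01-01T06:00:00",
            "dns": {"has_ns": True, "has_a": True, "has_mx": True, "nameservers": ["ns1.example.net"], "is_parked": False, "error": None},
            "http": {"is_reachable": True, "status_code": 200, "is_parked": False, "parking_keywords": [], "content_length": 5120, "error": None},
            "ssl": {"has_certificate": True, "is_valid": True, "expires_at": None, "days_until_expiry": 60, "issuer": "Let's Encrypt", "error": None},
        },
    },
    "total_domains": 2,
    "cached_domains": 1,
    "timestamp": "2025-01-01T06:05:00",
}
print(example_response["cached_domains"])  # 1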
@@ -372,6 +439,7 @@ async def get_domain_health(
     domain_id: int,
     current_user: CurrentUser,
     db: Database,
+    refresh: bool = Query(False, description="Force a live health check instead of using cache"),
 ):
     """
     Get comprehensive health report for a domain.
@@ -400,11 +468,44 @@ async def get_domain_health(
             detail="Domain not found",
         )
 
-    # Run health check
+    # Prefer cached report for UI performance
+    if not refresh:
+        cache_result = await db.execute(
+            select(DomainHealthCache).where(DomainHealthCache.domain_id == domain.id)
+        )
+        cache = cache_result.scalar_one_or_none()
+        if cache is not None:
+            return _health_cache_to_report(domain, cache)
+
+    # Live health check (slow) + update cache
     health_checker = get_health_checker()
     report = await health_checker.check_domain(domain.name)
+    report_dict = report.to_dict()
 
-    return report.to_dict()
+    signals_json = json.dumps(report_dict.get("signals") or [])
+    dns_json = json.dumps(report_dict.get("dns") or {})
+    http_json = json.dumps(report_dict.get("http") or {})
+    ssl_json = json.dumps(report_dict.get("ssl") or {})
+
+    cache_result = await db.execute(
+        select(DomainHealthCache).where(DomainHealthCache.domain_id == domain.id)
+    )
+    cache = cache_result.scalar_one_or_none()
+    if cache is None:
+        cache = DomainHealthCache(domain_id=domain.id)
+        db.add(cache)
+
+    cache.status = report_dict.get("status") or "unknown"
+    cache.score = int(report_dict.get("score") or 0)
+    cache.signals = signals_json
+    cache.dns_data = dns_json
+    cache.http_data = http_json
+    cache.ssl_data = ssl_json
+    cache.checked_at = datetime.utcnow()
+
+    await db.commit()
+
+    return report_dict
 
 
 @router.post("/health-check")
@@ -32,6 +32,7 @@ class Settings(BaseSettings):
     check_hour: int = 6
     check_minute: int = 0
     scheduler_check_interval_hours: int = 24
+    enable_scheduler: bool = False  # Run APScheduler jobs in this process (recommend: separate scheduler process)
 
     # =================================
     # External API Credentials
@@ -46,13 +46,17 @@ async def lifespan(app: FastAPI):
     await init_db()
     logger.info("Database initialized")
 
-    # Start scheduler
+    # Start scheduler (optional - recommended: run in separate process/container)
+    if settings.enable_scheduler:
         start_scheduler()
         logger.info("Scheduler started")
+    else:
+        logger.info("Scheduler disabled (ENABLE_SCHEDULER=false)")
 
     yield
 
     # Shutdown
+    if settings.enable_scheduler:
         stop_scheduler()
     logger.info("Application shutdown complete")
@@ -368,15 +368,22 @@ async def run_health_checks():
                 })
                 logger.info(f"⚠️ Status change: {domain.name} {old_status} → {new_status}")
 
-            # Serialize data to JSON strings
+            # Serialize data to JSON strings (cache is used by the UI)
             import json
-            signals_json = json.dumps(report.signals) if report.signals else None
+            report_dict = report.to_dict()
+            signals_json = json.dumps(report_dict.get("signals") or [])
+            dns_json = json.dumps(report_dict.get("dns") or {})
+            http_json = json.dumps(report_dict.get("http") or {})
+            ssl_json = json.dumps(report_dict.get("ssl") or {})
 
             # Update or create cache
             if existing_cache:
                 existing_cache.status = new_status
                 existing_cache.score = report.score
                 existing_cache.signals = signals_json
+                existing_cache.dns_data = dns_json
+                existing_cache.http_data = http_json
+                existing_cache.ssl_data = ssl_json
                 existing_cache.checked_at = datetime.utcnow()
             else:
                 # Create new cache entry
@@ -385,6 +392,9 @@ async def run_health_checks():
                     status=new_status,
                     score=report.score,
                     signals=signals_json,
+                    dns_data=dns_json,
+                    http_data=http_json,
+                    ssl_data=ssl_json,
                     checked_at=datetime.utcnow(),
                 )
                 db.add(new_cache)
@@ -68,59 +68,73 @@ class PriceTracker:
         Returns:
             List of significant price changes
         """
-        changes = []
+        changes: list[PriceChange] = []
         now = datetime.utcnow()
         cutoff = now - timedelta(hours=hours)
 
-        # Get unique TLD/registrar combinations
-        tld_registrars = await db.execute(
-            select(TLDPrice.tld, TLDPrice.registrar)
-            .distinct()
-        )
+        # PERF: Avoid N+1 queries (distinct(tld, registrar) + per-pair LIMIT 2).
+        # We fetch the latest 2 rows per (tld, registrar) using a window function.
+        ranked = (
+            select(
+                TLDPrice.tld.label("tld"),
+                TLDPrice.registrar.label("registrar"),
+                TLDPrice.registration_price.label("price"),
+                TLDPrice.recorded_at.label("recorded_at"),
+                func.row_number()
+                .over(
+                    partition_by=(TLDPrice.tld, TLDPrice.registrar),
+                    order_by=TLDPrice.recorded_at.desc(),
+                )
+                .label("rn"),
+            )
+        ).subquery()
 
-        for tld, registrar in tld_registrars:
-            # Get the two most recent prices for this TLD/registrar
-            result = await db.execute(
-                select(TLDPrice)
-                .where(
-                    and_(
-                        TLDPrice.tld == tld,
-                        TLDPrice.registrar == registrar,
-                    )
-                )
-                .order_by(TLDPrice.recorded_at.desc())
-                .limit(2)
-            )
-            prices = result.scalars().all()
+        rows = (
+            await db.execute(
+                select(
+                    ranked.c.tld,
+                    ranked.c.registrar,
+                    ranked.c.price,
+                    ranked.c.recorded_at,
+                    ranked.c.rn,
+                )
+                .where(ranked.c.rn <= 2)
+                .order_by(ranked.c.tld, ranked.c.registrar, ranked.c.rn)
+            )
+        ).all()
 
-            if len(prices) < 2:
+        from itertools import groupby
+
+        for (tld, registrar), grp in groupby(rows, key=lambda r: (r.tld, r.registrar)):
+            pair = list(grp)
+            if len(pair) < 2:
                 continue
 
-            new_price = prices[0]
-            old_price = prices[1]
+            newest = pair[0] if pair[0].rn == 1 else pair[1]
+            previous = pair[1] if pair[0].rn == 1 else pair[0]
 
-            # Check if change is within our time window
-            if new_price.recorded_at < cutoff:
+            # Only consider if the newest price is within the requested window
+            if newest.recorded_at is None or newest.recorded_at < cutoff:
                 continue
 
-            # Calculate change
-            if old_price.registration_price == 0:
+            if not previous.price or previous.price == 0:
                 continue
 
-            change_amount = new_price.registration_price - old_price.registration_price
-            change_percent = (change_amount / old_price.registration_price) * 100
+            change_amount = float(newest.price) - float(previous.price)
+            change_percent = (change_amount / float(previous.price)) * 100
 
-            # Only track significant changes
             if abs(change_percent) >= self.SIGNIFICANT_CHANGE_THRESHOLD:
-                changes.append(PriceChange(
-                    tld=tld,
-                    registrar=registrar,
-                    old_price=old_price.registration_price,
-                    new_price=new_price.registration_price,
-                    change_amount=change_amount,
-                    change_percent=change_percent,
-                    detected_at=new_price.recorded_at,
-                ))
+                changes.append(
+                    PriceChange(
+                        tld=tld,
+                        registrar=registrar,
+                        old_price=float(previous.price),
+                        new_price=float(newest.price),
+                        change_amount=change_amount,
+                        change_percent=change_percent,
+                        detected_at=newest.recorded_at,
+                    )
+                )
 
         # Sort by absolute change percentage (most significant first)
         changes.sort(key=lambda x: abs(x.change_percent), reverse=True)
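The window-function rewrite replaces one query per (tld, registrar) pair with a single ranked scan; the per-pair change math is unchanged. A self-contained sketch of the grouping and threshold check, using made-up rows and an assumed threshold value (the real SIGNIFICANT_CHANGE_THRESHOLD lives on PriceTracker and is not shown in the diff):

from itertools import groupby
from types import SimpleNamespace

SIGNIFICANT_CHANGE_THRESHOLD = 5.0  # percent; assumed value for illustration only

# Rows as returned by the ranked query: ordered by (tld, registrar, rn), rn=1 is newest.
rows = [
    SimpleNamespace(tld="com", registrar="alpha", price=12.0, rn=1),
    SimpleNamespace(tld="com", registrar="alpha", price=10.0, rn=2),
    SimpleNamespace(tld="io", registrar="beta", price=33.0, rn=1),  # only one row -> skipped
]

for (tld, registrar), grp in groupby(rows, key=lambda r: (r.tld, r.registrar)):
    pair = list(grp)
    if len(pair) < 2:
        continue
    newest, previous = pair[0], pair[1]
    change_percent = (newest.price - previous.price) / previous.price * 100
    if abs(change_percent) >= SIGNIFICANT_CHANGE_THRESHOLD:
        print(f".{tld} @ {registrar}: {previous.price} -> {newest.price} ({change_percent:+.1f}%)")
# .com @ alpha: 10.0 -> 12.0 (+20.0%)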
backend/run_scheduler.py (new file, 63 lines)
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+"""
+Standalone scheduler runner for Pounce.
+
+Runs APScheduler jobs without starting the FastAPI server.
+Recommended for production to avoid duplicate jobs when running multiple API workers.
+"""
+
+import asyncio
+import logging
+import signal
+
+from dotenv import load_dotenv
+
+# Load .env early (same as app/main.py)
+load_dotenv()
+
+from app.config import get_settings
+from app.database import init_db
+from app.scheduler import start_scheduler, stop_scheduler
+
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+)
+logger = logging.getLogger("pounce.scheduler")
+
+
+async def main() -> None:
+    settings = get_settings()
+    logger.info("Starting scheduler runner for %s...", settings.app_name)
+
+    # Ensure DB schema exists (create_all for new installs)
+    await init_db()
+    logger.info("Database initialized")
+
+    start_scheduler()
+    logger.info("Scheduler started")
+
+    stop_event = asyncio.Event()
+
+    def _request_shutdown(sig: signal.Signals) -> None:
+        logger.info("Received %s, shutting down scheduler...", sig.name)
+        stop_event.set()
+
+    loop = asyncio.get_running_loop()
+    for sig in (signal.SIGTERM, signal.SIGINT):
+        try:
+            loop.add_signal_handler(sig, lambda s=sig: _request_shutdown(s))
+        except NotImplementedError:
+            # Fallback (Windows / limited environments)
+            signal.signal(sig, lambda *_: _request_shutdown(sig))
+
+    await stop_event.wait()
+
+    stop_scheduler()
+    logger.info("Scheduler stopped. Bye.")
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
@@ -31,6 +31,7 @@ services:
       DATABASE_URL: postgresql+asyncpg://pounce:${DB_PASSWORD:-changeme}@db:5432/pounce
       SECRET_KEY: ${SECRET_KEY:-change-this-in-production}
       CORS_ORIGINS: ${CORS_ORIGINS:-http://localhost:3000}
+      ENABLE_SCHEDULER: "false"
     depends_on:
       db:
         condition: service_healthy
@@ -40,6 +41,22 @@ services:
       timeout: 10s
       retries: 3
 
+  # Scheduler (APScheduler) - runs background jobs in a separate process
+  scheduler:
+    build:
+      context: ./backend
+      dockerfile: Dockerfile
+    container_name: pounce-scheduler
+    restart: unless-stopped
+    environment:
+      DATABASE_URL: postgresql+asyncpg://pounce:${DB_PASSWORD:-changeme}@db:5432/pounce
+      SECRET_KEY: ${SECRET_KEY:-change-this-in-production}
+      ENABLE_SCHEDULER: "true"
+    depends_on:
+      db:
+        condition: service_healthy
+    command: ["python", "run_scheduler.py"]
+
   # Next.js Frontend
   frontend:
     build:
@@ -410,7 +410,8 @@ export default function WatchlistPage() {
 
     setLoadingHealth(prev => ({ ...prev, [domainId]: true }))
     try {
-      const report = await api.getDomainHealth(domainId)
+      // Force a live refresh when user explicitly triggers a check
+      const report = await api.getDomainHealth(domainId, { refresh: true })
       setHealthReports(prev => ({ ...prev, [domainId]: report }))
       setSelectedHealthDomainId(domainId)
     } catch (err: any) {
@@ -426,18 +427,16 @@ export default function WatchlistPage() {
     const loadHealthData = async () => {
       if (!domains || domains.length === 0) return
 
-      // Load health for registered domains only (not available ones)
-      const registeredDomains = domains.filter(d => !d.is_available)
-
-      for (const domain of registeredDomains.slice(0, 10)) { // Limit to first 10 to avoid overload
-        try {
-          const report = await api.getDomainHealth(domain.id)
-          setHealthReports(prev => ({ ...prev, [domain.id]: report }))
-        } catch {
-          // Silently fail - health data is optional
-        }
-        await new Promise(r => setTimeout(r, 200)) // Small delay
+      // Load cached health for all domains in one request (fast path)
+      try {
+        const data = await api.getDomainsHealthCache()
+        if (data?.reports) {
+          // API returns string keys; JS handles number indexing transparently.
+          setHealthReports(prev => ({ ...(prev as any), ...(data.reports as any) }))
+        }
+      } catch {
+        // Silently fail - health data is optional
       }
     }
 
     loadHealthData()
@@ -386,8 +386,9 @@ class ApiClient {
   }
 
   // Domain Health Check - 4-layer analysis (DNS, HTTP, SSL, WHOIS)
-  async getDomainHealth(domainId: number) {
-    return this.request<DomainHealthReport>(`/domains/${domainId}/health`)
+  async getDomainHealth(domainId: number, options?: { refresh?: boolean }) {
+    const refreshParam = options?.refresh ? '?refresh=true' : ''
+    return this.request<DomainHealthReport>(`/domains/${domainId}/health${refreshParam}`)
   }
 
   // Quick health check for any domain (premium)
@@ -397,6 +398,16 @@ class ApiClient {
     })
   }
 
+  // Bulk cached health reports for watchlist UI (fast)
+  async getDomainsHealthCache() {
+    return this.request<{
+      reports: Record<string, DomainHealthReport>
+      total_domains: number
+      cached_domains: number
+      timestamp: string
+    }>('/domains/health-cache')
+  }
+
   // TLD Pricing
   async getTldOverview(
     limit = 25,