perf: phase 1 db migrations, persisted scores, admin join, dashboard summary

Author: yves.gugger
Date:   2025-12-12 11:54:08 +01:00
Parent: 2e8ff50a90
Commit: ee4266d8f0

12 changed files with 514 additions and 178 deletions


@@ -17,6 +17,7 @@ from app.api.blog import router as blog_router
 from app.api.listings import router as listings_router
 from app.api.sniper_alerts import router as sniper_alerts_router
 from app.api.seo import router as seo_router
+from app.api.dashboard import router as dashboard_router

 api_router = APIRouter()
@@ -30,6 +31,7 @@ api_router.include_router(tld_prices_router, prefix="/tld-prices", tags=["TLD Pr
 api_router.include_router(price_alerts_router, prefix="/price-alerts", tags=["Price Alerts"])
 api_router.include_router(portfolio_router, prefix="/portfolio", tags=["Portfolio"])
 api_router.include_router(auctions_router, prefix="/auctions", tags=["Smart Pounce - Auctions"])
+api_router.include_router(dashboard_router, prefix="/dashboard", tags=["Dashboard"])

 # Marketplace (For Sale) - from analysis_3.md
 api_router.include_router(listings_router, prefix="/listings", tags=["Marketplace - For Sale"])


@@ -212,71 +212,72 @@ async def list_users(
     search: Optional[str] = None,
 ):
     """List all users with pagination and search."""
-    query = select(User).order_by(desc(User.created_at))
-
-    if search:
-        query = query.where(
-            User.email.ilike(f"%{search}%") |
-            User.name.ilike(f"%{search}%")
-        )
-
-    query = query.offset(offset).limit(limit)
-    result = await db.execute(query)
-    users = result.scalars().all()
-
-    # Get total count
+    # PERF: Avoid N+1 queries (subscription + domain_count per user).
+    domain_counts = (
+        select(
+            Domain.user_id.label("user_id"),
+            func.count(Domain.id).label("domain_count"),
+        )
+        .group_by(Domain.user_id)
+        .subquery()
+    )
+
+    base = (
+        select(
+            User,
+            Subscription,
+            func.coalesce(domain_counts.c.domain_count, 0).label("domain_count"),
+        )
+        .outerjoin(Subscription, Subscription.user_id == User.id)
+        .outerjoin(domain_counts, domain_counts.c.user_id == User.id)
+    )
+
+    if search:
+        base = base.where(
+            User.email.ilike(f"%{search}%") | User.name.ilike(f"%{search}%")
+        )
+
+    # Total count (for pagination UI)
     count_query = select(func.count(User.id))
     if search:
         count_query = count_query.where(
-            User.email.ilike(f"%{search}%") |
-            User.name.ilike(f"%{search}%")
+            User.email.ilike(f"%{search}%") | User.name.ilike(f"%{search}%")
         )
-    total = await db.execute(count_query)
-    total = total.scalar()
+    total = (await db.execute(count_query)).scalar() or 0
+
+    result = await db.execute(
+        base.order_by(desc(User.created_at)).offset(offset).limit(limit)
+    )
+    rows = result.all()

     user_list = []
-    for user in users:
-        # Get subscription
-        sub_result = await db.execute(
-            select(Subscription).where(Subscription.user_id == user.id)
-        )
-        subscription = sub_result.scalar_one_or_none()
-
-        # Get domain count
-        domain_count = await db.execute(
-            select(func.count(Domain.id)).where(Domain.user_id == user.id)
-        )
-        domain_count = domain_count.scalar()
-
-        user_list.append({
-            "id": user.id,
-            "email": user.email,
-            "name": user.name,
-            "is_active": user.is_active,
-            "is_verified": user.is_verified,
-            "is_admin": user.is_admin,
-            "created_at": user.created_at.isoformat(),
-            "last_login": user.last_login.isoformat() if user.last_login else None,
-            "domain_count": domain_count,
-            "subscription": {
-                "tier": subscription.tier.value if subscription else "scout",
-                "tier_name": TIER_CONFIG.get(subscription.tier, {}).get("name", "Scout") if subscription else "Scout",
-                "status": subscription.status.value if subscription else None,
-                "domain_limit": subscription.domain_limit if subscription else 5,
-            } if subscription else {
-                "tier": "scout",
-                "tier_name": "Scout",
-                "status": None,
-                "domain_limit": 5,
-            },
-        })
-
-    return {
-        "users": user_list,
-        "total": total,
-        "limit": limit,
-        "offset": offset,
-    }
+    for user, subscription, domain_count in rows:
+        user_list.append(
+            {
+                "id": user.id,
+                "email": user.email,
+                "name": user.name,
+                "is_active": user.is_active,
+                "is_verified": user.is_verified,
+                "is_admin": user.is_admin,
+                "created_at": user.created_at.isoformat(),
+                "last_login": user.last_login.isoformat() if user.last_login else None,
+                "domain_count": int(domain_count or 0),
+                "subscription": {
+                    "tier": subscription.tier.value if subscription else "scout",
+                    "tier_name": TIER_CONFIG.get(subscription.tier, {}).get("name", "Scout") if subscription else "Scout",
+                    "status": subscription.status.value if subscription else None,
+                    "domain_limit": subscription.domain_limit if subscription else 5,
+                } if subscription else {
+                    "tier": "scout",
+                    "tier_name": "Scout",
+                    "status": None,
+                    "domain_limit": 5,
+                },
+            }
+        )
+
+    return {"users": user_list, "total": total, "limit": limit, "offset": offset}

 # ============== User Export ==============
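Note on the rewrite above: for N users the old loop issued 2N + 2 statements (page + count, then subscription + domain count per user), while the joined version issues exactly 2. A quick way to verify is a statement counter; this is a hypothetical harness (fixture names assumed, not part of this commit):

from sqlalchemy import event

def track_query_count(sync_engine):
    """Attach a statement counter to an Engine (use engine.sync_engine for async engines)."""
    counter = {"n": 0}

    @event.listens_for(sync_engine, "before_cursor_execute")
    def _count(conn, cursor, statement, parameters, context, executemany):
        counter["n"] += 1

    return counter

# counter = track_query_count(engine.sync_engine)
# await list_users(...)   # expect counter["n"] == 2, regardless of user count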
@@ -290,9 +291,27 @@ async def export_users_csv(
     """Export all users as CSV data."""
     import csv
     import io

-    result = await db.execute(select(User).order_by(User.created_at))
-    users_list = result.scalars().all()
+    domain_counts = (
+        select(
+            Domain.user_id.label("user_id"),
+            func.count(Domain.id).label("domain_count"),
+        )
+        .group_by(Domain.user_id)
+        .subquery()
+    )
+
+    result = await db.execute(
+        select(
+            User,
+            Subscription,
+            func.coalesce(domain_counts.c.domain_count, 0).label("domain_count"),
+        )
+        .outerjoin(Subscription, Subscription.user_id == User.id)
+        .outerjoin(domain_counts, domain_counts.c.user_id == User.id)
+        .order_by(User.created_at)
+    )
+    users_list = result.all()

     # Create CSV
     output = io.StringIO()
@@ -304,19 +323,7 @@ async def export_users_csv(
         "Created At", "Last Login", "Tier", "Domain Limit", "Domains Used"
     ])

-    for user in users_list:
-        # Get subscription
-        sub_result = await db.execute(
-            select(Subscription).where(Subscription.user_id == user.id)
-        )
-        subscription = sub_result.scalar_one_or_none()
-
-        # Get domain count
-        domain_count = await db.execute(
-            select(func.count(Domain.id)).where(Domain.user_id == user.id)
-        )
-        domain_count = domain_count.scalar()
-
+    for user, subscription, domain_count in users_list:
         writer.writerow([
             user.id,
             user.email,
@@ -328,7 +335,7 @@ async def export_users_csv(
             user.last_login.strftime("%Y-%m-%d %H:%M") if user.last_login else "",
             subscription.tier.value if subscription else "scout",
             subscription.domain_limit if subscription else 5,
-            domain_count,
+            int(domain_count or 0),
         ])

     return {


@@ -785,76 +785,16 @@ def _get_opportunity_reasoning(value_ratio: float, hours_left: float, num_bids:
 def _calculate_pounce_score_v2(domain: str, tld: str, num_bids: int = 0, age_years: int = 0, is_pounce: bool = False) -> int:
-    """
-    Pounce Score v2.0 - Enhanced scoring algorithm.
-
-    Factors:
-    - Length (shorter = more valuable)
-    - TLD premium
-    - Market activity (bids)
-    - Age bonus
-    - Pounce Direct bonus (verified listings)
-    - Penalties (hyphens, numbers, etc.)
-    """
-    score = 50  # Baseline
-    name = domain.rsplit('.', 1)[0] if '.' in domain else domain
-
-    # A) LENGTH BONUS (exponential for short domains)
-    length_scores = {1: 50, 2: 45, 3: 40, 4: 30, 5: 20, 6: 15, 7: 10}
-    score += length_scores.get(len(name), max(0, 15 - len(name)))
-
-    # B) TLD PREMIUM
-    tld_scores = {
-        'com': 20, 'ai': 25, 'io': 18, 'co': 12,
-        'ch': 15, 'de': 10, 'net': 8, 'org': 8,
-        'app': 10, 'dev': 10, 'xyz': 5
-    }
-    score += tld_scores.get(tld.lower(), 0)
-
-    # C) MARKET ACTIVITY (bids = demand signal)
-    if num_bids >= 20:
-        score += 15
-    elif num_bids >= 10:
-        score += 10
-    elif num_bids >= 5:
-        score += 5
-    elif num_bids >= 2:
-        score += 2
-
-    # D) AGE BONUS (established domains)
-    if age_years and age_years > 15:
-        score += 10
-    elif age_years and age_years > 10:
-        score += 7
-    elif age_years and age_years > 5:
-        score += 3
-
-    # E) POUNCE DIRECT BONUS (verified = trustworthy)
-    if is_pounce:
-        score += 10
-
-    # F) PENALTIES
-    if '-' in name:
-        score -= 25
-    if any(c.isdigit() for c in name) and len(name) > 3:
-        score -= 20
-    if len(name) > 15:
-        score -= 15
-
-    # G) CONSONANT CHECK (no gibberish like "xkqzfgh")
-    consonants = 'bcdfghjklmnpqrstvwxyz'
-    max_streak = 0
-    current_streak = 0
-    for c in name.lower():
-        if c in consonants:
-            current_streak += 1
-            max_streak = max(max_streak, current_streak)
-        else:
-            current_streak = 0
-    if max_streak > 4:
-        score -= 15
-
-    return max(0, min(100, score))
+    # Backward-compatible wrapper (shared implementation lives in services)
+    from app.services.pounce_score import calculate_pounce_score_v2
+    return calculate_pounce_score_v2(
+        domain,
+        tld,
+        num_bids=num_bids,
+        age_years=age_years,
+        is_pounce=is_pounce,
+    )

 def _is_premium_domain(domain_name: str) -> bool:
@@ -1009,7 +949,7 @@ async def get_market_feed(
     if source == "pounce":
         listing_offset = offset
         listing_limit = limit
-    if source == "external" and sort_by != "score":
+    if source == "external":
         auction_offset = offset
         auction_limit = limit
@@ -1070,8 +1010,11 @@ async def get_market_feed(
     elif sort_by == "newest":
         auction_query = auction_query.order_by(DomainAuction.updated_at.desc())
     else:
-        # score: we will compute score in Python (Phase 1 introduces persisted score)
-        auction_query = auction_query.order_by(DomainAuction.updated_at.desc())
+        # score: prefer persisted score for DB-level sorting
+        auction_query = auction_query.order_by(
+            func.coalesce(DomainAuction.pounce_score, 0).desc(),
+            DomainAuction.updated_at.desc(),
+        )

     auction_query = auction_query.offset(auction_offset).limit(auction_limit)
     auctions = (await db.execute(auction_query)).scalars().all()
@@ -1081,13 +1024,15 @@ async def get_market_feed(
         if current_user is None and not _is_premium_domain(auction.domain):
             continue

-        pounce_score = _calculate_pounce_score_v2(
-            auction.domain,
-            auction.tld,
-            num_bids=auction.num_bids,
-            age_years=auction.age_years or 0,
-            is_pounce=False,
-        )
+        pounce_score = auction.pounce_score
+        if pounce_score is None:
+            pounce_score = _calculate_pounce_score_v2(
+                auction.domain,
+                auction.tld,
+                num_bids=auction.num_bids,
+                age_years=auction.age_years or 0,
+                is_pounce=False,
+            )

         if pounce_score < min_score:
             continue


@@ -0,0 +1,105 @@
"""Dashboard summary endpoints (reduce frontend API round-trips)."""
from datetime import datetime, timedelta

from fastapi import APIRouter, Depends
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession

from app.api.deps import get_current_user
from app.database import get_db
from app.models.auction import DomainAuction
from app.models.listing import DomainListing, ListingStatus
from app.models.user import User

# Reuse helpers for consistent formatting
from app.api.auctions import _format_time_remaining, _get_affiliate_url
from app.api.tld_prices import get_trending_tlds

router = APIRouter()


@router.get("/summary")
async def get_dashboard_summary(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """
    Return a compact dashboard payload used by `/terminal/radar`.

    Goal: 1 request instead of multiple heavy round-trips.
    """
    now = datetime.utcnow()

    # -------------------------
    # Market stats + preview
    # -------------------------
    active_auctions_filter = and_(DomainAuction.is_active == True, DomainAuction.end_time > now)
    total_auctions = (await db.execute(select(func.count(DomainAuction.id)).where(active_auctions_filter))).scalar() or 0

    cutoff = now + timedelta(hours=24)
    ending_soon_filter = and_(
        DomainAuction.is_active == True,
        DomainAuction.end_time > now,
        DomainAuction.end_time <= cutoff,
    )
    ending_soon_count = (await db.execute(select(func.count(DomainAuction.id)).where(ending_soon_filter))).scalar() or 0

    ending_soon = (
        await db.execute(
            select(DomainAuction)
            .where(ending_soon_filter)
            .order_by(DomainAuction.end_time.asc())
            .limit(5)
        )
    ).scalars().all()

    ending_soon_preview = [
        {
            "domain": a.domain,
            "current_bid": a.current_bid,
            "time_remaining": _format_time_remaining(a.end_time, now=now),
            "platform": a.platform,
            "affiliate_url": _get_affiliate_url(a.platform, a.domain, a.auction_url),
        }
        for a in ending_soon
    ]

    # -------------------------
    # Listings stats (user)
    # -------------------------
    listing_counts = (
        await db.execute(
            select(DomainListing.status, func.count(DomainListing.id))
            .where(DomainListing.user_id == current_user.id)
            .group_by(DomainListing.status)
        )
    ).all()
    by_status = {status: int(count) for status, count in listing_counts}
    listing_stats = {
        "active": by_status.get(ListingStatus.ACTIVE.value, 0),
        "sold": by_status.get(ListingStatus.SOLD.value, 0),
        "draft": by_status.get(ListingStatus.DRAFT.value, 0),
        "total": sum(by_status.values()),
    }

    # -------------------------
    # Trending TLDs (public data)
    # -------------------------
    trending = await get_trending_tlds(db)

    return {
        "market": {
            "total_auctions": total_auctions,
            "ending_soon": ending_soon_count,
            "ending_soon_preview": ending_soon_preview,
        },
        "listings": listing_stats,
        "tlds": trending,
        "timestamp": now.isoformat(),
    }
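For reference, a minimal client-side smoke test of the new endpoint. This is hypothetical: it assumes a local dev server, the usual bearer-token auth behind `get_current_user`, and an `/api/v1` prefix, none of which are shown in this diff:

import httpx

resp = httpx.get(
    "http://localhost:8000/api/v1/dashboard/summary",  # base URL and prefix assumed
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
summary = resp.json()
print(summary["market"]["total_auctions"], summary["listings"]["active"], summary["timestamp"])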


@@ -45,4 +45,7 @@ async def init_db():
     """Initialize database tables."""
     async with engine.begin() as conn:
         await conn.run_sync(Base.metadata.create_all)
+        # Apply additive migrations (indexes / optional columns) for existing DBs
+        from app.db_migrations import apply_migrations
+        await apply_migrations(conn)


@@ -0,0 +1,132 @@
"""
Lightweight, idempotent DB migrations.

This project historically used `Base.metadata.create_all()` for bootstrapping new installs.
That does NOT handle schema evolution on existing databases. For performance-related changes
(indexes, new optional columns), we apply additive migrations on startup.

Important:
- Only additive changes (ADD COLUMN / CREATE INDEX) should live here.
- Operations must be idempotent (safe to run on every startup).
"""
from __future__ import annotations

import logging
from typing import Any

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncConnection

logger = logging.getLogger(__name__)


async def _sqlite_table_exists(conn: AsyncConnection, table: str) -> bool:
    res = await conn.execute(
        text("SELECT 1 FROM sqlite_master WHERE type='table' AND name=:name LIMIT 1"),
        {"name": table},
    )
    return res.scalar() is not None


async def _sqlite_has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    res = await conn.execute(text(f"PRAGMA table_info({table})"))
    rows = res.fetchall()
    # PRAGMA table_info: (cid, name, type, notnull, dflt_value, pk)
    return any(r[1] == column for r in rows)


async def _postgres_table_exists(conn: AsyncConnection, table: str) -> bool:
    # to_regclass returns NULL if the relation does not exist
    res = await conn.execute(text("SELECT to_regclass(:name)"), {"name": table})
    return res.scalar() is not None


async def _postgres_has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    res = await conn.execute(
        text(
            """
            SELECT 1
            FROM information_schema.columns
            WHERE table_schema = current_schema()
              AND table_name = :table
              AND column_name = :column
            LIMIT 1
            """
        ),
        {"table": table, "column": column},
    )
    return res.scalar() is not None


async def _table_exists(conn: AsyncConnection, table: str) -> bool:
    dialect = conn.engine.dialect.name
    if dialect == "sqlite":
        return await _sqlite_table_exists(conn, table)
    return await _postgres_table_exists(conn, table)


async def _has_column(conn: AsyncConnection, table: str, column: str) -> bool:
    dialect = conn.engine.dialect.name
    if dialect == "sqlite":
        return await _sqlite_has_column(conn, table, column)
    return await _postgres_has_column(conn, table, column)


async def apply_migrations(conn: AsyncConnection) -> None:
    """
    Apply idempotent migrations.

    Called on startup after `create_all()` to keep existing DBs up-to-date.
    """
    dialect = conn.engine.dialect.name
    logger.info("DB migrations: starting (dialect=%s)", dialect)

    # ------------------------------------------------------------------
    # 1) domain_auctions.pounce_score (enables DB-level sorting/pagination)
    # ------------------------------------------------------------------
    if await _table_exists(conn, "domain_auctions"):
        if not await _has_column(conn, "domain_auctions", "pounce_score"):
            logger.info("DB migrations: adding column domain_auctions.pounce_score")
            await conn.execute(text("ALTER TABLE domain_auctions ADD COLUMN pounce_score INTEGER"))
        # Index for feed ordering
        await conn.execute(
            text("CREATE INDEX IF NOT EXISTS ix_domain_auctions_pounce_score ON domain_auctions(pounce_score)")
        )

    # ---------------------------------------------------------
    # 2) domain_checks index for history queries (watchlist UI)
    # ---------------------------------------------------------
    if await _table_exists(conn, "domain_checks"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_checks_domain_id_checked_at "
                "ON domain_checks(domain_id, checked_at)"
            )
        )

    # ---------------------------------------------------
    # 3) tld_prices composite index for trend computations
    # ---------------------------------------------------
    if await _table_exists(conn, "tld_prices"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_tld_prices_tld_registrar_recorded_at "
                "ON tld_prices(tld, registrar, recorded_at)"
            )
        )

    # ----------------------------------------------------
    # 4) domain_listings pounce_score index (market sorting)
    # ----------------------------------------------------
    if await _table_exists(conn, "domain_listings"):
        await conn.execute(
            text(
                "CREATE INDEX IF NOT EXISTS ix_domain_listings_pounce_score "
                "ON domain_listings(pounce_score)"
            )
        )

    logger.info("DB migrations: done")


@@ -53,6 +53,7 @@ class DomainAuction(Base):
     age_years: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
     backlinks: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
     domain_authority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
+    pounce_score: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True)

     # Scraping metadata
     scraped_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)


@@ -286,6 +286,21 @@ class AuctionScraperService:
             }
         )

+        # Persist pounce_score for DB-level sorting/filtering (Market feed)
+        try:
+            from app.services.pounce_score import calculate_pounce_score_v2
+
+            cleaned["pounce_score"] = calculate_pounce_score_v2(
+                domain,
+                tld,
+                num_bids=num_bids,
+                age_years=int(auction_data.get("age_years") or 0),
+                is_pounce=False,
+            )
+        except Exception:
+            # Score is optional; keep payload valid if anything goes wrong
+            cleaned["pounce_score"] = None
+
         currency = cleaned.get("currency") or "USD"
         cleaned["currency"] = str(currency).strip().upper()


@@ -0,0 +1,116 @@
"""
Pounce Score calculation.

Used across:
- Market feed scoring
- Auction scraper (persist score for DB-level sorting)
- Listings (optional)
"""
from __future__ import annotations

from typing import Optional


def calculate_pounce_score_v2(
    domain: str,
    tld: Optional[str] = None,
    *,
    num_bids: int = 0,
    age_years: int = 0,
    is_pounce: bool = False,
) -> int:
    """
    Pounce Score v2.0 - Enhanced scoring algorithm.

    Factors:
    - Length (shorter = more valuable)
    - TLD premium
    - Market activity (bids)
    - Age bonus
    - Pounce Direct bonus (verified listings)
    - Penalties (hyphens, numbers, etc.)
    """
    score = 50  # Baseline

    domain = (domain or "").strip().lower()
    if not domain:
        return score

    name = domain.rsplit(".", 1)[0] if "." in domain else domain
    tld_clean = (tld or (domain.rsplit(".", 1)[-1] if "." in domain else "")).strip().lower().lstrip(".")

    # A) LENGTH BONUS (exponential for short domains)
    length_scores = {1: 50, 2: 45, 3: 40, 4: 30, 5: 20, 6: 15, 7: 10}
    score += length_scores.get(len(name), max(0, 15 - len(name)))

    # B) TLD PREMIUM
    tld_scores = {
        "com": 20,
        "ai": 25,
        "io": 18,
        "co": 12,
        "ch": 15,
        "de": 10,
        "net": 8,
        "org": 8,
        "app": 10,
        "dev": 10,
        "xyz": 5,
    }
    score += tld_scores.get(tld_clean, 0)

    # C) MARKET ACTIVITY (bids = demand signal)
    try:
        bids = int(num_bids or 0)
    except Exception:
        bids = 0
    if bids >= 20:
        score += 15
    elif bids >= 10:
        score += 10
    elif bids >= 5:
        score += 5
    elif bids >= 2:
        score += 2

    # D) AGE BONUS (established domains)
    try:
        age = int(age_years or 0)
    except Exception:
        age = 0
    if age > 15:
        score += 10
    elif age > 10:
        score += 7
    elif age > 5:
        score += 3

    # E) POUNCE DIRECT BONUS (verified = trustworthy)
    if is_pounce:
        score += 10

    # F) PENALTIES
    if "-" in name:
        score -= 25
    if any(c.isdigit() for c in name) and len(name) > 3:
        score -= 20
    if len(name) > 15:
        score -= 15

    # G) CONSONANT CHECK (no gibberish like "xkqzfgh")
    consonants = "bcdfghjklmnpqrstvwxyz"
    max_streak = 0
    current_streak = 0
    for c in name.lower():
        if c in consonants:
            current_streak += 1
            max_streak = max(max_streak, current_streak)
        else:
            current_streak = 0
    if max_streak > 4:
        score -= 15

    return max(0, min(100, score))
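Hand-checked examples of the rules above (illustrative only; these inputs are invented, not from the codebase):

from app.services.pounce_score import calculate_pounce_score_v2

assert calculate_pounce_score_v2("abc.com") == 100
# 50 + 40 (3-char name) + 20 (.com) = 110, clamped to 100
assert calculate_pounce_score_v2("example.org", num_bids=12, age_years=8) == 81
# 50 + 10 (7-char name) + 8 (.org) + 10 (>= 10 bids) + 3 (age > 5) = 81
assert calculate_pounce_score_v2("my-cheap-domain4u.net") == 0
# 50 + 8 (.net) - 25 (hyphen) - 20 (digits) - 15 (name > 15 chars) = -2, clamped to 0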


@@ -16,8 +16,9 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 from app.database import engine, Base

-# Import all models to register them with SQLAlchemy
-from app.models import user, domain, tld_price, newsletter, portfolio, price_alert  # noqa: F401 - imported for side effects
+# Import all models to register them with SQLAlchemy (ensures ALL tables are created)
+import app.models  # noqa: F401

 async def init_database():
@@ -27,6 +28,9 @@ async def init_database():
     async with engine.begin() as conn:
         # Create all tables
         await conn.run_sync(Base.metadata.create_all)
+        # Apply additive migrations (indexes / optional columns)
+        from app.db_migrations import apply_migrations
+        await apply_migrations(conn)

     print("✅ Database tables created successfully!")
     print("")


@@ -188,29 +188,15 @@ export default function RadarPage() {
   // Load Data
   const loadDashboardData = useCallback(async () => {
     try {
-      const [endingSoonAuctions, allAuctionsData, trending, listings] = await Promise.all([
-        api.getEndingSoonAuctions(24, 5).catch(() => []),
-        api.getAuctions().catch(() => ({ auctions: [], total: 0 })),
-        api.getTrendingTlds().catch(() => ({ trending: [] })),
-        api.request<any[]>('/listings/my').catch(() => [])
-      ])
-
-      // Hot auctions for display (max 5)
-      setHotAuctions(endingSoonAuctions.slice(0, 5))
-
-      // Market stats - total opportunities from ALL auctions
+      const summary = await api.getDashboardSummary()
+
+      setHotAuctions((summary.market.ending_soon_preview || []).slice(0, 5))
+
       setMarketStats({
-        totalAuctions: allAuctionsData.total || allAuctionsData.auctions?.length || 0,
-        endingSoon: endingSoonAuctions.length
+        totalAuctions: summary.market.total_auctions || 0,
+        endingSoon: summary.market.ending_soon || 0,
       })
-
-      setTrendingTlds(trending.trending?.slice(0, 6) || [])
-
-      // Calculate listing stats
-      const active = listings.filter(l => l.status === 'active').length
-      const sold = listings.filter(l => l.status === 'sold').length
-      const draft = listings.filter(l => l.status === 'draft').length
-      setListingStats({ active, sold, draft, total: listings.length })
+      setTrendingTlds(summary.tlds?.trending?.slice(0, 6) || [])
+      setListingStats(summary.listings || { active: 0, sold: 0, draft: 0, total: 0 })
     } catch (error) {
       console.error('Failed to load dashboard data:', error)
     } finally {


@@ -118,6 +118,26 @@ class ApiClient {
     }>('/auth/me')
   }

+  // Dashboard (Terminal Radar) - single call payload
+  async getDashboardSummary() {
+    return this.request<{
+      market: {
+        total_auctions: number
+        ending_soon: number
+        ending_soon_preview: Array<{
+          domain: string
+          current_bid: number
+          time_remaining: string
+          platform: string
+          affiliate_url?: string
+        }>
+      }
+      listings: { active: number; sold: number; draft: number; total: number }
+      tlds: { trending: Array<{ tld: string; reason: string; price_change: number; current_price: number }> }
+      timestamp: string
+    }>('/dashboard/summary')
+  }
+
   async updateMe(data: { name?: string }) {
     return this.request<{
       id: number