feat(scraping): server-only ops + stronger freshness guards

- Run auction cleanup every 5 minutes and treat end_time <= now as ended
- Add admin endpoints to upload/inspect Playwright cookies (free alternative to paid proxies)
- Add client-side guardrail to never render ended auctions in Terminal Market
2025-12-11 22:23:00 +01:00
parent 3172df3fae
commit 3ac6831ffa
3 changed files with 71 additions and 7 deletions


@@ -9,6 +9,7 @@ Provides admin-only access to:
 - Domain/Portfolio overview
 """
 from datetime import datetime, timedelta
+from pathlib import Path
 from typing import Optional
 from fastapi import APIRouter, HTTPException, status, BackgroundTasks, Depends
 from pydantic import BaseModel, EmailStr
@@ -25,8 +26,6 @@ from app.models.auction import DomainAuction
 from app.models.price_alert import PriceAlert
 
 router = APIRouter()
-
-
 # ============== Admin Authentication ==============
 
 async def require_admin(
@@ -41,6 +40,60 @@ async def require_admin(
     return current_user
 
 
+# ============== Scraping Ops (Server-only, free alternative to paid proxies) ==============
+
+class PlaywrightCookiesUpload(BaseModel):
+    """Upload Playwright cookies JSON used by protected scrapers (e.g. NameJet)."""
+    cookies: list[dict]
+
+
+@router.post("/scraping/playwright-cookies")
+async def upload_playwright_cookies(
+    payload: PlaywrightCookiesUpload,
+    admin: User = Depends(require_admin),
+):
+    """Replace the server's Playwright cookie jar file."""
+    cookie_dir = Path(__file__).parent.parent / "data" / "cookies"
+    cookie_dir.mkdir(parents=True, exist_ok=True)
+    cookie_file = cookie_dir / "session_cookies.json"
+
+    if not payload.cookies:
+        raise HTTPException(status_code=400, detail="cookies must not be empty")
+
+    try:
+        import json
+        cookie_file.write_text(json.dumps(payload.cookies, indent=2))
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Failed to write cookie file: {e}")
+
+    return {
+        "status": "ok",
+        "cookies_count": len(payload.cookies),
+        "updated_at": datetime.utcnow().isoformat(),
+        "note": "Enable protected scraping with POUNCE_ENABLE_PROTECTED_SCRAPERS=true",
+    }
+
+
+@router.get("/scraping/playwright-cookies")
+async def get_playwright_cookie_status(
+    admin: User = Depends(require_admin),
+):
+    """Return Playwright cookie jar status (no contents)."""
+    cookie_dir = Path(__file__).parent.parent / "data" / "cookies"
+    cookie_file = cookie_dir / "session_cookies.json"
+
+    if not cookie_file.exists():
+        return {"exists": False}
+
+    stat = cookie_file.stat()
+    return {
+        "exists": True,
+        "path": str(cookie_file),
+        "size_bytes": stat.st_size,
+        "modified_at": datetime.utcfromtimestamp(stat.st_mtime).isoformat() + "Z",
+    }
+
+
 # ============== Dashboard Stats ==============
 
 @router.get("/stats")


@@ -542,12 +542,12 @@ def setup_scheduler():
         replace_existing=True,
     )
 
-    # Cleanup expired auctions every 15 minutes (CRITICAL for data freshness!)
+    # Cleanup expired auctions every 5 minutes (CRITICAL for data freshness!)
     scheduler.add_job(
         cleanup_expired_auctions,
-        CronTrigger(minute='*/15'),  # Every 15 minutes
+        CronTrigger(minute='*/5'),  # Every 5 minutes
         id="auction_cleanup",
-        name="Expired Auction Cleanup (15m)",
+        name="Expired Auction Cleanup (5m)",
         replace_existing=True,
     )
 
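One scheduling subtlety: CronTrigger(minute='*/5') fires on wall-clock five-minute marks (:00, :05, :10, ...), not every five minutes from scheduler start. A standalone way to sanity-check the cadence, assuming APScheduler 3.x:

# Sketch: print the next few fire times of the new trigger (APScheduler 3.x API).
from datetime import datetime, timezone
from apscheduler.triggers.cron import CronTrigger

trigger = CronTrigger(minute='*/5')
fire = trigger.get_next_fire_time(None, datetime.now(timezone.utc))
for _ in range(3):
    print(fire)  # lands on :00/:05/:10/... wall-clock marks
    fire = trigger.get_next_fire_time(fire, fire)  # next mark after the previous run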
@@ -673,12 +673,12 @@ async def cleanup_expired_auctions():
     async with AsyncSessionLocal() as db:
         now = datetime.utcnow()
 
-        # 1. Mark ended auctions as inactive
+        # 1. Mark ended auctions as inactive (<= now to avoid "0m" linger)
         stmt = (
             update(DomainAuction)
             .where(
                 and_(
-                    DomainAuction.end_time < now,
+                    DomainAuction.end_time <= now,
                     DomainAuction.is_active == True
                 )
             )
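The < to <= change is smaller than it looks but removes a real edge case: with strict <, an auction whose end_time equals the sweep's now survives that pass and can linger in listings as "0m left" until the next run. A plain-Python illustration of the predicate boundary (no database involved):

# Illustration only: the boundary case the WHERE clause change fixes.
from datetime import datetime

now = datetime.utcnow()
end_time = now  # an auction ending exactly at the sweep timestamp

print(end_time < now)   # False -> old predicate skips it; it lingers as "0m"
print(end_time <= now)  # True  -> new predicate deactivates it this pass

Combined with the 15m -> 5m cadence above, worst-case staleness for an ended auction drops from roughly 15 minutes to 5.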


@@ -362,6 +362,17 @@ export default function MarketPage() {
   const filteredItems = useMemo(() => {
     let filtered = items
 
+    // Hard safety: never show ended auctions client-side.
+    // (Server already filters, this is a guardrail against any drift/cache.)
+    const nowMs = Date.now()
+    filtered = filtered.filter(item => {
+      if (item.status !== 'auction') return true
+      if (!item.end_time) return true
+      const t = Date.parse(item.end_time)
+      if (Number.isNaN(t)) return true
+      return t > (nowMs - 2000) // 2s grace
+    })
+
     // Additional client-side search
     if (searchQuery && !loading) {
       const query = searchQuery.toLowerCase()