This repository was archived on 2026-02-24. You can view its files and clone it, but you cannot open issues, create pull requests, or push commits.
Files
market/scrape/persist_snapshot/save_link_reports.py
giles 6271a715a1
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
feat: initialize market app with browsing, product, and scraping code
Split from coop monolith. Includes:
- Market/browse/product blueprints
- Product sync API
- Suma scraping pipeline
- Templates for market, browse, and product views
- Dockerfile and CI workflow for independent deployment
2026-02-09 23:16:34 +00:00

30 lines
931 B
Python

# at top of persist_snapshot/save_link_reports.py:
from typing import List
from typing import Dict, List
from models.market import (
LinkError,
LinkExternal,
)
from db.session import get_session
# --- Models are unchanged, see original code ---
# ---------------------- Helper fns called from scraper ------------------------
async def save_link_reports(link_errors: List[Dict], link_externals: List[Dict]) -> None:
    """Persist one scrape run's link reports in a single transaction.

    Builds ``LinkError`` and ``LinkExternal`` rows from the raw dicts the
    scraper produced and commits them together, so a report is either fully
    stored or not stored at all.

    Args:
        link_errors: dicts expected to carry the keys ``product``, ``href``,
            ``text``, ``top``, ``sub``, ``target_slug`` and ``type``.
            Missing keys become ``None`` (``.get``), i.e. NULL columns.
        link_externals: dicts expected to carry ``product``, ``href``,
            ``text`` and ``host``; missing keys likewise become NULL.

    Returns:
        None. Side effect: inserts rows via the async DB session.
    """
    async with get_session() as session:
        # add_all batches the inserts instead of one session.add per row;
        # comprehensions keep the row construction in one place per model.
        session.add_all([
            LinkError(
                product_slug=e.get("product"),
                href=e.get("href"),
                text=e.get("text"),
                top=e.get("top"),
                sub=e.get("sub"),
                target_slug=e.get("target_slug"),
                type=e.get("type"),
            )
            for e in link_errors
        ])
        session.add_all([
            LinkExternal(
                product_slug=e.get("product"),
                href=e.get("href"),
                text=e.get("text"),
                host=e.get("host"),
            )
            for e in link_externals
        ])
        # Single commit: both report kinds land atomically.
        await session.commit()