This repository was archived on 2026-02-24. You can view files and clone it, but you cannot open issues or pull requests or push a commit.
Files
market/scrape/build_snapshot/tools/capture_sub.py
giles 6271a715a1
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
feat: initialize market app with browsing, product, and scraping code
Split from coop monolith. Includes:
- Market/browse/product blueprints
- Product sync API
- Suma scraping pipeline
- Templates for market, browse, and product views
- Dockerfile and CI workflow for independent deployment
2026-02-09 23:16:34 +00:00

23 lines
697 B
Python

from urllib.parse import urljoin
from urllib.parse import urljoin
from config import config
from utils import log
from ...listings import scrape_products
async def capture_sub(
    sub,
    slug,
):
    """Scrape every listing page of one sub-category under *slug*.

    *sub* is a mapping holding a "slug" key for the sub-category.
    Returns a tuple ``(sub_url, items, total_pages)`` where *items* is the
    accumulated product list across all pages, or ``None`` when *sub* has
    no slug.
    """
    sub_slug = sub.get("slug")
    if not sub_slug:
        # Nothing to scrape without a sub-category slug.
        return
    # Build the absolute listing URL from the configured base.
    sub_url = urljoin(config()["base_url"], f"/{slug}/{sub_slug}")
    log(f"[{slug}/{sub_slug}] page 1…")
    items, total_pages = await scrape_products(sub_url, page=1)
    # Page 1 reports the total page count; fall back to a single page.
    last_page = int(total_pages or 1)
    page = 2
    while page <= last_page:
        log(f"[{slug}/{sub_slug}] page {page}")
        more_items, _ = await scrape_products(sub_url, page=page)
        items.extend(more_items)
        page += 1
    return (sub_url, items, total_pages)