feat: initialize market app with browsing, product, and scraping code
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
Some checks failed
Build and Deploy / build-and-deploy (push) Has been cancelled
Split from coop monolith. Includes: - Market/browse/product blueprints - Product sync API - Suma scraping pipeline - Templates for market, browse, and product views - Dockerfile and CI workflow for independent deployment
This commit is contained in:
0
bp/__init__.py
Normal file
0
bp/__init__.py
Normal file
0
bp/api/__init__.py
Normal file
0
bp/api/__init__.py
Normal file
414
bp/api/routes.py
Normal file
414
bp/api/routes.py
Normal file
@@ -0,0 +1,414 @@
|
||||
# products_api_async.py
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from typing import Any, Dict, List, Tuple, Iterable, Optional
|
||||
|
||||
from quart import Blueprint, request, jsonify, g
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from scrape.persist_snapshot.log_product_result import _log_product_result
|
||||
from scrape.persist_snapshot.save_nav import _save_nav
|
||||
from scrape.persist_snapshot.capture_listing import _capture_listing
|
||||
from scrape.persist_snapshot.save_subcategory_redirects import _save_subcategory_redirects
|
||||
|
||||
# ⬇️ Import your models (names match your current file)
|
||||
from models.market import (
|
||||
Product,
|
||||
ProductImage,
|
||||
ProductSection,
|
||||
ProductLabel,
|
||||
ProductSticker,
|
||||
ProductAttribute,
|
||||
ProductNutrition,
|
||||
ProductAllergen,
|
||||
)
|
||||
|
||||
from suma_browser.app.redis_cacher import clear_cache
|
||||
from suma_browser.app.csrf import csrf_exempt
|
||||
|
||||
|
||||
products_api = Blueprint("products_api", __name__, url_prefix="/api/products")
|
||||
|
||||
# ---- Comparison config (matches your schema) --------------------------------
|
||||
|
||||
# Top-level Product columns that participate in change detection during /sync/.
PRODUCT_FIELDS: List[str] = [
    "slug",
    "title",
    "image",
    "description_short",
    "description_html",
    "suma_href",
    "brand",
    # price triples: normalized value, currency code, raw scraped text
    "rrp", "rrp_currency", "rrp_raw",
    "price_per_unit", "price_per_unit_currency", "price_per_unit_raw",
    "special_price", "special_price_currency", "special_price_raw",
    "regular_price", "regular_price_currency", "regular_price_raw",
    "oe_list_price",
    # case/pack sizing
    "case_size_count", "case_size_item_qty", "case_size_item_unit", "case_size_raw",
    "ean", "sku", "unit_size", "pack_size",
]

# rel_name -> (Model, fields_to_compare, key_for_orderless_compare)
# Child collections are compared as dicts keyed by the third element
# (via _list_to_index), so child-row ORDER never triggers a resync.
CHILD_SPECS: Dict[str, Tuple[Any, List[str], str]] = {
    "images": (ProductImage, ["url", "position", "kind"], "url"),
    "sections": (ProductSection, ["title", "html"], "title"),
    "labels": (ProductLabel, ["name"], "name"),
    "stickers": (ProductSticker, ["name"], "name"),
    "attributes": (ProductAttribute, ["key", "value"], "key"),
    "nutrition": (ProductNutrition, ["key", "value", "unit"], "key"),
    "allergens": (ProductAllergen, ["name", "contains"], "name"),
}
|
||||
|
||||
def _now_utc():
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
def _norm_scalar(v: Any) -> Any:
|
||||
if isinstance(v, Decimal):
|
||||
s = format(v.normalize(), "f")
|
||||
return "0" if s in ("-0", "-0.0") else s
|
||||
if isinstance(v, bool):
|
||||
return bool(v)
|
||||
if isinstance(v, (int, float, str)) or v is None:
|
||||
return v
|
||||
return str(v)
|
||||
|
||||
def _normalize_row(obj: Dict[str, Any], keep: List[str]) -> Dict[str, Any]:
    """Project *obj* down to the *keep* fields, stripping strings and
    normalizing every value via _norm_scalar. Missing keys map to None."""
    normalized: Dict[str, Any] = {}
    for field in keep:
        raw = obj.get(field)
        if isinstance(raw, str):
            raw = raw.strip()
        normalized[field] = _norm_scalar(raw)
    return normalized
|
||||
|
||||
def _list_to_index(items: Iterable[Dict[str, Any]], uniq: str) -> Dict[Any, Dict[str, Any]]:
|
||||
ix: Dict[Any, Dict[str, Any]] = {}
|
||||
for it in items or []:
|
||||
key = it.get(uniq)
|
||||
if key is None:
|
||||
continue
|
||||
ix[key] = it
|
||||
return ix
|
||||
|
||||
def _serialize_product_for_compare(p: Product) -> Dict[str, Any]:
    """Project an ORM Product (plus its child collections) into the
    canonical compare shape: normalized scalars at the top level, and each
    relation indexed by its CHILD_SPECS key so ordering is irrelevant."""
    root: Dict[str, Any] = {name: _norm_scalar(getattr(p, name)) for name in PRODUCT_FIELDS}
    for rel_name, (_Model, fields, uniq) in CHILD_SPECS.items():
        children = getattr(p, rel_name) or []
        rows = [
            {field: _norm_scalar(getattr(child, field)) for field in fields}
            for child in children
        ]
        root[rel_name] = _list_to_index(rows, uniq)
    return root
|
||||
|
||||
def _serialize_payload_for_compare(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize an incoming JSON payload into the same compare shape that
    _serialize_product_for_compare produces for the ORM side. Non-dict
    child rows are silently discarded."""
    root = _normalize_row(payload, PRODUCT_FIELDS)
    for rel_name, (_Model, fields, uniq) in CHILD_SPECS.items():
        raw_rows = [r for r in (payload.get(rel_name) or []) if isinstance(r, dict)]
        normalized_rows = [_normalize_row(r, fields) for r in raw_rows]
        root[rel_name] = _list_to_index(normalized_rows, uniq)
    return root
|
||||
|
||||
from decimal import Decimal, InvalidOperation
|
||||
|
||||
def _is_numeric_like(x) -> bool:
|
||||
if isinstance(x, bool):
|
||||
return False
|
||||
if isinstance(x, (int, float, Decimal)):
|
||||
return True
|
||||
if isinstance(x, str):
|
||||
s = x.strip()
|
||||
if not s:
|
||||
return False
|
||||
try:
|
||||
Decimal(s)
|
||||
return True
|
||||
except InvalidOperation:
|
||||
return False
|
||||
return False
|
||||
|
||||
def _to_decimal(x) -> Decimal:
|
||||
if isinstance(x, Decimal):
|
||||
return x
|
||||
if isinstance(x, bool) or x is None:
|
||||
raise InvalidOperation
|
||||
if isinstance(x, (int, str)):
|
||||
return Decimal(str(x).strip())
|
||||
if isinstance(x, float):
|
||||
return Decimal(str(x)) # avoid float fp artifacts
|
||||
# last resort: string-coerce
|
||||
return Decimal(str(x).strip())
|
||||
|
||||
def values_different(av, bv) -> bool:
    """Compare two scalars for the sync diff.

    None is only equal to None. When the right-hand side looks numeric,
    both sides are compared as Decimals (so "1.50" == 1.5); otherwise the
    values are compared by their string form.
    """
    # None handling mirrors the original semantics exactly
    if bv is None:
        return av is not None
    if av is None:
        return True

    if not _is_numeric_like(bv):
        # non-numeric: fall back to string comparison
        return f"{av}" != f"{bv}"

    try:
        return _to_decimal(av) != _to_decimal(bv)
    except InvalidOperation:
        # left side isn't numeric-parsable -> treat as different
        return True
|
||||
|
||||
import re
|
||||
|
||||
# Cloudflare email-protection artifacts that must be stripped before HTML
# comparison, otherwise re-scraped pages always look "different".
# Anchor form: <a href=".../cdn-cgi/l/email-protection#...">inner</a>
_cf_a_re = re.compile(r'<a[^>]+/cdn-cgi/l/email-protection#[^"]+"[^>]*>(.*?)</a>', re.I | re.S)
# Span form: <span class="__cf_email__" ...>inner</span>
_cf_span_re = re.compile(r'<span[^>]*class="__cf_email__"[^>]*>(.*?)</span>', re.I | re.S)
# Leftover data-cfemail="..." attribute (leading whitespace consumed too)
_cf_data_attr_re = re.compile(r'\sdata-cfemail="[^"]+"', re.I)
# Whitespace runs, collapsed to single spaces by normalize_cf_email
_ws_re = re.compile(r'\s+')
|
||||
|
||||
def normalize_cf_email(html: str) -> str:
    """Strip Cloudflare email-protection markup from an HTML fragment and
    collapse whitespace, so protected and unprotected captures of the same
    page compare equal. Non-string input is returned untouched."""
    if not isinstance(html, str):
        return html
    # Unwrap CF spans, then CF protection anchors, keeping their inner text
    cleaned = _cf_span_re.sub(r'\1', html)
    cleaned = _cf_a_re.sub(r'\1', cleaned)
    # Drop any surviving data-cfemail attribute
    cleaned = _cf_data_attr_re.sub('', cleaned)
    # Collapse whitespace runs and trim the ends
    return _ws_re.sub(' ', cleaned).strip()
|
||||
|
||||
|
||||
def _deep_equal(a: Dict[str, Any], b: Dict[str, Any]) -> bool:
    """Structural equality for the two compare shapes.

    Dicts recurse; lists/tuples are compared order-sensitively element by
    element; everything else goes through normalize_cf_email (no-op on
    non-strings) and values_different, so numeric-looking strings compare
    numerically and Cloudflare email markup is ignored.

    NOTE(review): a dict paired with a non-dict (or list with non-list)
    falls through to the scalar path and is compared by string form.
    """
    # keys must match at this level
    if a.keys() != b.keys():
        return False

    for k in a.keys():
        av, bv = a[k], b[k]

        # Dicts: recurse, but don't return early unless it's False
        if isinstance(av, dict) and isinstance(bv, dict):
            if not _deep_equal(av, bv):
                # log_diff(k, av, bv) # optional
                return False
            continue

        # Lists/Tuples: compare length then elements (order-sensitive here)
        if isinstance(av, (list, tuple)) and isinstance(bv, (list, tuple)):
            if len(av) != len(bv):
                # log_diff(k, av, bv)
                return False
            for i, (ai, bi) in enumerate(zip(av, bv)):
                # nested dicts within lists
                if isinstance(ai, dict) and isinstance(bi, dict):
                    if not _deep_equal(ai, bi):
                        return False
                else:
                    if values_different(normalize_cf_email(ai), normalize_cf_email(bi)):
                        return False
            continue

        # Scalars / everything else
        if values_different(normalize_cf_email(av), normalize_cf_email(bv)):
            # print('!!deep', k, av, bv)
            return False

    return True
|
||||
|
||||
# ---- Mutation helpers -------------------------------------------------------
|
||||
|
||||
def _apply_product_fields(p: Product, payload: Dict[str, Any]) -> None:
    """Copy every PRODUCT_FIELDS entry from *payload* onto *p* (missing
    keys set the attribute to None) and bump updated_at to now (UTC)."""
    for f in PRODUCT_FIELDS:
        setattr(p, f, payload.get(f))
    p.updated_at = _now_utc()
|
||||
|
||||
def _replace_children(p: Product, payload: Dict[str, Any]) -> None:
    """Populate every child collection of *p* from the payload's arrays.

    NOTE(review): despite the comment below, the .clear() calls are
    commented out, so this APPENDS rather than replaces — it is only safe
    on a freshly created Product (see _create_product_from_payload), where
    the collections start empty. Confirm before reusing on existing rows.
    """
    # replace each relation wholesale (delete-orphan takes care of removal)
    #p.images.clear()
    for row in payload.get("images") or []:
        p.images.append(ProductImage(
            url=row.get("url"),
            position=row.get("position") or 0,   # default slot 0
            kind=row.get("kind") or "gallery",   # default image kind
            created_at=_now_utc(), updated_at=_now_utc(),
        ))

    #p.sections.clear()
    for row in payload.get("sections") or []:
        p.sections.append(ProductSection(
            title=row.get("title") or "",
            html=row.get("html") or "",
            created_at=_now_utc(), updated_at=_now_utc(),
        ))

    #p.labels.clear()
    for row in payload.get("labels") or []:
        p.labels.append(ProductLabel(
            name=row.get("name") or "",
            created_at=_now_utc(), updated_at=_now_utc(),
        ))

    #p.stickers.clear()
    for row in payload.get("stickers") or []:
        p.stickers.append(ProductSticker(
            name=row.get("name") or "",
            created_at=_now_utc(), updated_at=_now_utc(),
        ))

    #p.attributes.clear()
    for row in payload.get("attributes") or []:
        p.attributes.append(ProductAttribute(
            key=row.get("key") or "",
            value=row.get("value"),
            created_at=_now_utc(), updated_at=_now_utc(),
        ))

    #p.nutrition.clear()
    for row in payload.get("nutrition") or []:
        p.nutrition.append(ProductNutrition(
            key=row.get("key") or "",
            value=row.get("value"),
            unit=row.get("unit"),
            created_at=_now_utc(), updated_at=_now_utc(),
        ))

    #p.allergens.clear()
    for row in payload.get("allergens") or []:
        p.allergens.append(ProductAllergen(
            name=row.get("name") or "",
            contains=bool(row.get("contains", False)),
            created_at=_now_utc(), updated_at=_now_utc(),
        ))
|
||||
|
||||
async def _create_product_from_payload(session: AsyncSession, payload: Dict[str, Any]) -> Product:
    """Insert a brand-new (undeleted) Product built from *payload*.

    Children are attached before the flush; SQLAlchemy cascades persist
    them together with the parent. Returns the flushed instance (id set);
    the caller owns the commit.
    """
    p = Product()
    _apply_product_fields(p, payload)
    p.created_at = _now_utc()
    p.deleted_at = None  # explicitly mark as live
    session.add(p)
    #await session.flush() # get p.id
    _replace_children(p, payload)
    await session.flush()
    return p
|
||||
|
||||
# ---- API --------------------------------------------------------------------
|
||||
|
||||
|
||||
@csrf_exempt
@products_api.post("/listing/")
@clear_cache(tag='browse')
async def capture_lsting():
    """POST /api/products/listing/ — persist one scraped listing page.

    Expects JSON with 'url', 'items', and 'total_pages'; raises KeyError
    (→ 500) when any is missing. Invalidates the 'browse' page cache.

    NOTE(review): function name has a typo ("lsting") — kept as-is because
    it is the registered Quart endpoint name; renaming would break url_for.
    """
    data: Dict[str, Any] = await request.get_json(force=True, silent=False)
    url = data['url']
    items = data['items']
    total_pages = data['total_pages']
    await _capture_listing(g.s, url,items, total_pages)
    return {"ok": True}
|
||||
|
||||
|
||||
|
||||
@csrf_exempt
@products_api.post("/log/")
@clear_cache(tag='browse')
async def log_product():
    """POST /api/products/log/ — record a scrape result (success/failure).

    Body: {"ok": bool, "payload": {...}}. Always answers 200; failures in
    the logger are reported as {"ok": false}.

    NOTE(review): the except clause swallows the exception without logging
    it anywhere ('e' is unused) — consider at least logger.exception here.
    """
    data: Dict[str, Any] = await request.get_json(force=True, silent=False)
    ok = bool(data["ok"])

    payload = data.get("payload") or {}
    try:
        await _log_product_result(g.s, ok, payload)
        return {"ok": True}
    except Exception as e:
        return {"ok": False}
|
||||
|
||||
|
||||
@csrf_exempt
@products_api.post("/redirects/")
@clear_cache(tag='browse')
async def rediects():
    """POST /api/products/redirects/ — store subcategory redirect mappings
    (a flat {from_slug: to_slug} JSON object) and clear the browse cache.

    NOTE(review): function name has a typo ("rediects") — kept because it
    is the registered endpoint name; renaming would break url_for callers.
    """
    data: Dict[str, str] = await request.get_json(force=True, silent=False)
    await _save_subcategory_redirects(g.s, data)
    return {"ok": True}
|
||||
|
||||
|
||||
@csrf_exempt
@products_api.post("/nav/")
@clear_cache(tag='browse')
async def save_nav():
    """POST /api/products/nav/ — persist the scraped navigation tree via
    _save_nav and invalidate the 'browse' page cache."""
    data: Dict[str, Any] = await request.get_json(force=True, silent=False)
    await _save_nav(g.s, data)
    return {"ok": True}
|
||||
|
||||
|
||||
@csrf_exempt
@products_api.post("/sync/")
@clear_cache(tag='browse')
async def sync_product():
    """
    POST /api/products/sync

    Upsert strategy: compare the normalized payload against the live row
    for the slug. Identical → touch updated_at (200 "touched"); different
    → soft-delete the old row and insert a fresh one (201 "replaced");
    no live row → insert (201 "created").

    Body includes top-level fields and child arrays like:
    {
      "slug": "my-product",
      "title": "...",
      "images": [{"url":"https://..","position":0,"kind":"gallery"}],
      "sections": [{"title":"Details","html":"<p>..</p>"}],
      "labels": [{"name":"Vegan"}],
      "stickers": [{"name":"Sale"}],
      "attributes": [{"key":"Country","value":"UK"}],
      "nutrition": [{"key":"Energy","value":"100","unit":"kcal"}],
      "allergens": [{"name":"Nuts","contains":true}]
    }
    """
    payload = await request.get_json(force=True, silent=False)
    if not isinstance(payload, dict):
        return jsonify({"error": "Invalid JSON"}), 400

    slug = payload.get("slug")
    if not isinstance(slug, str) or not slug:
        return jsonify({"error": "Missing 'slug'"}), 400


    # find undeleted row by slug
    #stmt = select(Product).where(Product.slug == slug, Product.deleted_at.is_(None))

    # Eager-load every child collection so the compare serializer does not
    # trigger lazy loads on the async session.
    stmt = (
        select(Product)
        .where(Product.slug == slug, Product.deleted_at.is_(None))
        .options(
            selectinload(Product.images),
            selectinload(Product.sections),
            selectinload(Product.labels),
            selectinload(Product.stickers),
            selectinload(Product.attributes),
            selectinload(Product.nutrition),
            selectinload(Product.allergens),
        )
    )
    existing: Optional[Product] = (await g.s.execute(stmt)).scalars().first()

    incoming_norm = _serialize_payload_for_compare(payload)

    if existing:
        db_norm = _serialize_product_for_compare(existing)

        if _deep_equal(db_norm, incoming_norm):
            # Exactly equal → just touch updated_at
            existing.updated_at = _now_utc()
            await g.s.flush()
            return jsonify({"id": existing.id, "action": "touched"}), 200

        # Different → soft delete old + create a new row
        existing.deleted_at = _now_utc()
        await g.s.flush() # ensure the soft-delete is persisted before inserting the new row

        new_p = await _create_product_from_payload(g.s, payload)
        await g.s.flush()
        return jsonify({"id": new_p.id, "action": "replaced"}), 201

    # Not found → create
    new_p = await _create_product_from_payload(g.s, payload)
    await g.s.flush()
    return jsonify({"id": new_p.id, "action": "created"}), 201
|
||||
|
||||
7
bp/browse/__init__.py
Normal file
7
bp/browse/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from __future__ import annotations

# Re-export the blueprint factory: callers do `from bp.browse import register`
# and register the Blueprint it returns on their app.
from .routes import register

# NOTE(review): the old "attach routes to a module-level blueprint" comments
# were stale — register() now builds the Blueprint and its routes itself.
# This extra import is presumably kept for side effects; verify and drop if none.
from . import routes # noqa: F401
|
||||
162
bp/browse/routes.py
Normal file
162
bp/browse/routes.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
from quart import (
|
||||
g,
|
||||
Blueprint,
|
||||
abort,
|
||||
render_template,
|
||||
render_template_string,
|
||||
make_response,
|
||||
current_app,
|
||||
)
|
||||
from config import config
|
||||
from .services.nav import category_context, get_nav
|
||||
from .services.blacklist.category import is_category_blocked
|
||||
|
||||
from .services import (
|
||||
_hx_fragment_request,
|
||||
_productInfo,
|
||||
_vary,
|
||||
_current_url_without_page,
|
||||
)
|
||||
|
||||
from suma_browser.app.redis_cacher import cache_page
|
||||
from suma_browser.app.utils.htmx import is_htmx_request
|
||||
|
||||
def register():
    """Build and return the public 'browse' Blueprint.

    Creates the blueprint, nests the product blueprint under it, and
    defines the landing page plus the /all/, /<top>/ and /<top>/<sub>/
    listing routes. All routes are cached under the 'browse' tag and
    answer differently for full-page, HTMX-page-1, and HTMX-pagination
    requests.
    """
    browse_bp = Blueprint("browse", __name__)

    # Mount the product detail blueprint beneath this one
    from .. import register_product
    browse_bp.register_blueprint(
        register_product(),
    )

    @browse_bp.get("/")
    @cache_page(tag="browse")
    async def home():
        """
        Market landing page.
        Shows the Ghost CMS post with slug='market'.
        """
        from shared.internal_api import get as api_get

        # Fetch the market post from coop internal API
        p_data = await api_get("coop", "/internal/post/market")
        if not p_data:
            abort(404)

        # Determine which template to use based on request type
        if not is_htmx_request():
            # Normal browser request: full page with layout
            html = await render_template("_types/market/index.html", **p_data)
        else:
            # HTMX request: main panel + OOB elements
            html = await render_template("_types/market/_oob_elements.html", **p_data)

        return await make_response(html)

    @browse_bp.get("/all/")
    @cache_page(tag="browse")
    async def browse_all():
        """
        Browse all products across all categories.
        Renders full page or just product cards (HTMX pagination fragment).
        """
        # NOTE(review): nav is fetched but never used in this handler —
        # confirm whether templates need it, otherwise drop the call.
        nav = await get_nav(g.s)
        ctx = {
            "category_label": "All Products",
            "top_slug": "all",
            "sub_slug": None,
        }

        product_info = await _productInfo()
        full_context = {**product_info, **ctx}

        # Determine which template to use based on request type and pagination
        if not is_htmx_request():
            # Normal browser request: full page with layout
            html = await render_template("_types/browse/index.html", **full_context)
        elif product_info["page"] > 1:
            # HTMX pagination: just product cards + sentinel
            html = await render_template("_types/browse/_product_cards.html", **product_info)
        else:
            # HTMX navigation (page 1): main panel + OOB elements
            html = await render_template("_types/browse/_oob_elements.html", **full_context)

        resp = await make_response(html)
        resp.headers["Hx-Push-Url"] = _current_url_without_page()
        return _vary(resp)


    @browse_bp.get("/<top_slug>/")
    @cache_page(tag="browse")
    async def browse_top(top_slug: str):
        """
        Browse by top-level category (e.g. /fruit).
        404 if category not in allowed list or is blocked.
        """
        REVERSE_CATEGORY = {v: k for k, v in config()["categories"]["allow"].items()}
        if top_slug not in REVERSE_CATEGORY:
            abort(404)
        if is_category_blocked(top_slug):
            abort(404)

        nav = await get_nav(g.s)
        ctx = category_context(top_slug, None, nav)

        product_info = await _productInfo(top_slug)
        full_context = {**product_info, **ctx}

        # Determine which template to use based on request type and pagination
        if not is_htmx_request():
            # Normal browser request: full page with layout
            html = await render_template("_types/browse/index.html", **full_context)
        elif product_info["page"] > 1:
            # HTMX pagination: just product cards + sentinel
            html = await render_template("_types/browse/_product_cards.html", **product_info)
        else:
            # HTMX navigation (page 1): main panel + OOB elements
            html = await render_template("_types/browse/_oob_elements.html", **full_context)

        resp = await make_response(html)
        resp.headers["Hx-Push-Url"] = _current_url_without_page()
        return _vary(resp)


    @browse_bp.get("/<top_slug>/<sub_slug>/")
    @cache_page(tag="browse")
    async def browse_sub(top_slug: str, sub_slug: str):
        """
        Browse by subcategory (e.g. /fruit/citrus).
        404 if blocked or unknown.
        """
        REVERSE_CATEGORY = {v: k for k, v in config()["categories"]["allow"].items()}
        if top_slug not in REVERSE_CATEGORY:
            abort(404)
        if is_category_blocked(top_slug, sub_slug):
            abort(404)

        nav = await get_nav(g.s)
        ctx = category_context(top_slug, sub_slug, nav)

        product_info = await _productInfo(top_slug, sub_slug)
        full_context = {**product_info, **ctx}

        # Determine which template to use based on request type and pagination
        if not is_htmx_request():
            # Normal browser request: full page with layout
            html = await render_template("_types/browse/index.html", **full_context)
        elif product_info["page"] > 1:
            # HTMX pagination: just product cards + sentinel
            html = await render_template("_types/browse/_product_cards.html", **product_info)
        else:
            # HTMX navigation (page 1): main panel + OOB elements
            html = await render_template("_types/browse/_oob_elements.html", **full_context)

        resp = await make_response(html)
        resp.headers["Hx-Push-Url"] = _current_url_without_page()
        return _vary(resp)


    return browse_bp
|
||||
13
bp/browse/services/__init__.py
Normal file
13
bp/browse/services/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from __future__ import annotations
|
||||
from quart import Blueprint
|
||||
|
||||
|
||||
from .services import (
|
||||
_hx_fragment_request,
|
||||
_productInfo,
|
||||
_order_brands_selected_first,
|
||||
_massage_product,
|
||||
_vary,
|
||||
_current_url_without_page,
|
||||
_is_liked
|
||||
)
|
||||
12
bp/browse/services/blacklist/category.py
Normal file
12
bp/browse/services/blacklist/category.py
Normal file
@@ -0,0 +1,12 @@
|
||||
# suma_browser/category_blacklist.py
|
||||
from __future__ import annotations
|
||||
from typing import Optional
|
||||
from config import config
|
||||
|
||||
def _norm(s: str) -> str:
|
||||
return (s or "").strip().lower().strip("/")
|
||||
|
||||
def is_category_blocked(top_slug: str, sub_slug: Optional[str] = None) -> bool:
    """True when the category (or the 'top/sub' pair) is on the configured
    category blacklist. A blocked top level blocks all of its subs."""
    blocked = config()["blacklist"]["category"]
    if not sub_slug:
        return _norm(top_slug) in blocked
    # sub level: the parent being blocked wins, else check the pair
    return is_category_blocked(top_slug) or _norm(f"{top_slug}/{sub_slug}") in blocked
|
||||
15
bp/browse/services/blacklist/product.py
Normal file
15
bp/browse/services/blacklist/product.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from typing import Set, Optional
|
||||
from ..slugs import canonical_html_slug
|
||||
from config import config
|
||||
|
||||
# NOTE(review): apparently-dead module state — nothing in this module reads
# or writes these (looks like leftovers from a file-mtime cache); confirm no
# external importer uses them, then remove.
_blocked: Set[str] = set()
_mtime: Optional[float] = None
|
||||
|
||||
def _norm(slug: str) -> str:
    """Canonicalize a product slug for blacklist lookups: trim, lower-case,
    drop an optional 'product/' prefix, then apply canonical_html_slug."""
    cleaned = (slug or "").strip().strip("/").lower()
    if cleaned.startswith("product/"):
        _, _, cleaned = cleaned.partition("/")
    return canonical_html_slug(cleaned)
|
||||
|
||||
def is_product_blocked(slug: str) -> bool:
    """True when the canonicalized slug appears on the product blacklist."""
    blacklist = config()["blacklist"]["product"]
    return _norm(slug) in blacklist
|
||||
11
bp/browse/services/blacklist/product_details.py
Normal file
11
bp/browse/services/blacklist/product_details.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import re
|
||||
from config import config
|
||||
|
||||
def _norm_title_key(t: str) -> str:
|
||||
t = (t or "").strip().lower()
|
||||
t = re.sub(r":\s*$", "", t)
|
||||
t = re.sub(r"\s+", " ", t)
|
||||
return t
|
||||
|
||||
def is_blacklisted_heading(title: str) -> bool:
    """True when the normalized heading matches a configured
    'product-details' blacklist entry (case-insensitive)."""
    blocked = {entry.lower() for entry in config()["blacklist"]["product-details"]}
    return _norm_title_key(title) in blocked
|
||||
367
bp/browse/services/cache_backend.py
Normal file
367
bp/browse/services/cache_backend.py
Normal file
@@ -0,0 +1,367 @@
|
||||
from __future__ import annotations
|
||||
import os, json
|
||||
from typing import List, Optional
|
||||
from config import config
|
||||
from .blacklist.product import is_product_blocked
|
||||
|
||||
|
||||
def _json(path: str):
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def fs_nav():
    """Read the cached navigation tree from <fs_root>/nav.json."""
    nav_path = os.path.join(config()["cache"]["fs_root"], "nav.json")
    return _json(nav_path)
|
||||
|
||||
|
||||
def _brand_of(item: dict) -> str:
|
||||
b = (item.get("brand") or "").strip()
|
||||
if b:
|
||||
return b
|
||||
try:
|
||||
return (item.get("info_table", {}).get("Brand") or "").strip()
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def _stickers_of(item: dict) -> List[str]:
|
||||
vals = item.get("stickers") or []
|
||||
out = []
|
||||
for v in vals:
|
||||
s = (str(v) or "").strip().lower()
|
||||
if s:
|
||||
out.append(s)
|
||||
return out
|
||||
|
||||
|
||||
def fs_product_by_slug(slug: str):
    """Load a cached product JSON from <fs_root>/products/.

    Accepts either a bare slug ('foo') or a filename ('foo.json').
    Raises like open()/json.load() on missing or invalid files.
    """
    name = (slug or "").strip()
    if not name.endswith(".json"):
        name = f"{name}.json"
    products_root = os.path.join(config()["cache"]["fs_root"], "products")
    return _json(os.path.join(products_root, name))
|
||||
|
||||
|
||||
def fs_count_products_in_sub(top_slug: str, sub_slug: Optional[str]) -> int:
    """
    Return how many products are in the listing for (top_slug, sub_slug),
    after filtering out blocked products.

    If sub_slug is None, that's the top-level category listing.
    """
    # Same listings/<top>[/<sub>]/items.json layout that fs_products reads
    segments = ["listings", top_slug]
    if sub_slug:
        segments.append(sub_slug)
    segments.append("items.json")
    path = os.path.join(config()["cache"]["fs_root"], *segments)

    if not os.path.exists(path):
        return 0
    try:
        all_slugs = _json(path)
    except Exception:
        # unreadable/corrupt listing counts as empty
        return 0

    # Count only the slugs that survive the product blacklist
    return sum(1 for slug in all_slugs if not is_product_blocked(slug))
|
||||
|
||||
|
||||
def fs_products(
    top_slug: str | None,
    sub_slug: str | None,
    selected_brands: Optional[List[str]] = None,
    selected_stickers: Optional[List[str]] = None,
    selected_labels: Optional[List[str]] = None,
    page: int = 1,
    search: Optional[str] = None,
    sort: Optional[str] = None,
    page_size: int = 20,

    # NEW: only include products the current user has liked
    liked_slugs: Optional[List[str]] = None,
    liked: Optional[bool] = None,
):
    """
    Returns:
    {
      "total_pages": int,
      "items": [product dict ...], # filtered + paginated (sorted)
      "brands": [{"name": str, "count": int}],
      "stickers": [{"name": str, "count": int}],
      "labels": [{"name": str, "count": int}],
    }

    Filters:
    - top_slug / sub_slug scope
    - selected_brands
    - selected_stickers
    - selected_labels
    - search
    - liked_slugs (if provided)
    """

    # NOTE(review): these shadow the module-level imports — harmless but
    # redundant; candidates for removal.
    import os
    from typing import List, Dict

    fs_root = config()["cache"]["fs_root"]

    # ---------- Collect slugs ----------
    slugs: List[str] = []
    if top_slug: # normal listing path
        parts = ["listings", top_slug]
        if sub_slug:
            parts.append(sub_slug)
        parts.append("items.json")
        path = os.path.join(fs_root, *parts)
        if os.path.exists(path):
            try:
                slugs = [s for s in _json(path) if not is_product_blocked(s)]
            except Exception:
                slugs = []
    else:
        # No top slug: include ALL products from /products/*.json
        products_dir = os.path.join(fs_root, "products")
        try:
            for fname in os.listdir(products_dir):
                if not fname.endswith(".json"):
                    continue
                slug = fname[:-5] # strip .json
                if not is_product_blocked(slug):
                    slugs.append(slug)
        except FileNotFoundError:
            slugs = []

    # ---------- Load product dicts ----------
    # Unreadable/corrupt product files are skipped silently.
    all_items: List[dict] = []
    for slug in slugs:
        try:
            item = fs_product_by_slug(slug)
            if isinstance(item, dict):
                all_items.append(item)
        except Exception:
            continue

    # Stable deterministic ordering when aggregating everything (name ASC)
    def _title_key(it: dict) -> tuple:
        title = (it.get("title") or it.get("name") or it.get("slug") or "").strip().lower()
        return (title, it.get("slug") or "")

    all_items.sort(key=_title_key)

    # ---------- Helpers for filters & counts ----------
    def _brand_of_local(item: dict) -> str:
        # Like module-level _brand_of but without the try/except guard.
        b = item.get("brand") or (item.get("info_table") or {}).get("Brand")
        return (b or "").strip()

    def _stickers_of_local(item: dict) -> List[str]:
        vals = item.get("stickers") or []
        out = []
        for s in vals:
            if isinstance(s, str):
                s2 = s.strip().lower()
                if s2:
                    out.append(s2)
        return out

    def _labels_of_local(item: dict) -> List[str]:
        vals = item.get("labels") or []
        out = []
        for s in vals:
            if isinstance(s, str):
                s2 = s.strip().lower()
                if s2:
                    out.append(s2)
        return out

    # Normalized (trimmed, lower-cased) filter selections
    sel_brands = [
        (s or "").strip().lower()
        for s in (selected_brands or [])
        if (s or "").strip()
    ]
    sel_stickers = [
        (s or "").strip().lower()
        for s in (selected_stickers or [])
        if (s or "").strip()
    ]
    sel_labels = [
        (s or "").strip().lower()
        for s in (selected_labels or [])
        if (s or "").strip()
    ]
    search_q = (search or "").strip().lower() or None

    # NOTE(review): liked_set is computed but never read — only
    # real_liked_set is used below; candidate for removal.
    liked_set = {
        (slug or "").strip().lower()
        for slug in (liked_slugs or [] if liked else [])
        if (slug or "").strip()
    }

    real_liked_set = {
        (slug or "").strip().lower()
        for slug in (liked_slugs or [])
        if (slug or "").strip()
    }

    def matches_brand(item: dict) -> bool:
        if not sel_brands:
            return True
        return _brand_of_local(item).strip().lower() in sel_brands

    def has_all_selected_stickers(item: dict) -> bool:
        if not sel_stickers:
            return True
        tags = set(_stickers_of_local(item))
        return all(s in tags for s in sel_stickers)

    def has_all_selected_labels(item: dict) -> bool:
        if not sel_labels:
            return True
        tags = set(_labels_of_local(item))
        return all(s in tags for s in sel_labels)

    def matches_search(item: dict) -> bool:
        # Substring match against description_short only.
        if not search_q:
            return True
        desc = (item.get("description_short") or "").strip().lower()
        return search_q in desc

    def is_liked(item: dict) -> bool:
        """
        True if this item should be shown under the liked filter.
        If liked_set is empty, treat everything as allowed.
        """
        # NOTE(review): docstring above is stale — this actually tests
        # membership in real_liked_set, so an empty set matches nothing.
        slug_val = (item.get("slug") or "").strip().lower()
        return slug_val in real_liked_set

    # ---------- Counts (dependent on other filters + search + liked) ----------
    # NOTE(review): header comment is aspirational — counts below are taken
    # over ALL scoped items, not over the filtered subset; confirm intent.
    brand_counts: Dict[str, int] = {}
    for b in (selected_brands or []):
        brand_counts[b] = 0

    for it in all_items:
        b = _brand_of_local(it)
        if not b:
            continue
        brand_counts[b] = brand_counts.get(b, 0) + 1

    sticker_counts: Dict[str, int] = {}
    for s in (selected_stickers or []):
        sticker_counts[s] = 0
    for it in all_items:
        for s in _stickers_of_local(it):
            sticker_counts[s] = sticker_counts.get(s, 0) + 1

    label_counts: Dict[str, int] = {}
    for s in (selected_labels or []):
        label_counts[s] = 0
    for it in all_items:
        for s in _labels_of_local(it):
            label_counts[s] = label_counts.get(s, 0) + 1

    liked_count = 0
    for it in all_items:
        if is_liked(it):
            liked_count += 1

    search_count=0
    for it in all_items:
        if matches_search(it):
            search_count += 1


    # ---------- Apply filters ----------
    filtered = [
        it
        for it in all_items
        if matches_brand(it)
        and has_all_selected_stickers(it)
        and has_all_selected_labels(it)
        and matches_search(it)
        and (not liked or is_liked(it))
    ]

    # ---------- Sorting ----------
    sort_mode = (sort or "az").strip().lower()

    # NOTE(review): both price keys use item["regular_price"] directly —
    # a product file without that key raises KeyError here; confirm the
    # cache writer always emits it.
    def _price_key(item: dict):
        p = item["regular_price"]
        title, slug = _title_key(item)
        # priced items first (asc), unpriced last; ties broken by title/slug
        return (0 if p is not None else 1, p if p is not None else 0, title, slug)

    def _price_key_desc(item: dict):
        p = item["regular_price"]
        title, slug = _title_key(item)
        return (
            0 if p is not None else 1,
            -(p if p is not None else 0),
            title,
            slug,
        )

    if sort_mode in ("az",):
        filtered.sort(key=_title_key)
    elif sort_mode in ("za",):
        filtered.sort(key=_title_key, reverse=True)
    elif sort_mode in (
        "price-asc", "price_asc", "price-low", "price-low-high", "low-high", "lo-hi"
    ):
        filtered.sort(key=_price_key)
    elif sort_mode in (
        "price-desc", "price_desc", "price-high", "price-high-low", "high-low", "hi-lo"
    ):
        filtered.sort(key=_price_key_desc)
    else:
        # unknown sort value falls back to A→Z
        filtered.sort(key=_title_key)

    # ---------- Pagination ----------
    total_pages = max(1, (len(filtered) + page_size - 1) // page_size)
    page = max(1, page)
    start = (page - 1) * page_size
    end = start + page_size
    page_items = filtered[start:end]
    # ---------- Format counts lists ----------
    brands_list = sorted(
        [{"name": k, "count": v} for k, v in brand_counts.items()],
        key=lambda x: (-x["count"], x["name"].lower()),
    )
    stickers_list = sorted(
        [{"name": k, "count": v} for k, v in sticker_counts.items()],
        key=lambda x: (-x["count"], x["name"]),
    )
    labels_list = sorted(
        [{"name": k, "count": v} for k, v in label_counts.items()],
        key=lambda x: (-x["count"], x["name"]),
    )
    return {
        "total_pages": total_pages,
        "items": page_items,
        "brands": brands_list,
        "stickers": stickers_list,
        "labels": labels_list,
        "liked_count": liked_count,
        "search_count": search_count
    }
|
||||
|
||||
# async wrappers (unchanged)
|
||||
async def read_nav():
    """Async facade over the snapshot-backed navigation loader ``fs_nav``."""
    return fs_nav()
|
||||
|
||||
async def read_listing(top_slug: str, sub_slug: str | None, page: int):
    """Async facade over ``fs_products`` for one category page.

    The two ``None`` arguments are passed positionally; presumably they are
    unset brand/sticker filters — confirm against the ``fs_products`` signature.
    """
    return fs_products(top_slug, sub_slug, None, None, page)
|
||||
|
||||
async def read_product(slug_or_path: str):
    """Resolve a product snapshot from a slug, a path, or a URL-like string.

    Normalization: take the last ``/``-separated segment, then discard any
    query string, and look the result up by slug.
    """
    raw = (slug_or_path or "").strip()
    # rsplit returns the whole string when no "/" is present, so this also
    # covers the plain-slug case.
    tail = raw.rsplit("/", 1)[-1]
    clean_slug = tail.split("?", 1)[0]
    return fs_product_by_slug(clean_slug)
|
||||
657
bp/browse/services/db_backend.py
Normal file
657
bp/browse/services/db_backend.py
Normal file
@@ -0,0 +1,657 @@
|
||||
from __future__ import annotations
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from sqlalchemy import select, and_
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from config import config # if unused elsewhere, you can remove this import
|
||||
|
||||
# ORM models
|
||||
from models.market import (
|
||||
Product, ProductImage, ProductSection,
|
||||
Listing, ListingItem,
|
||||
NavTop, NavSub,
|
||||
ProductSticker, ProductLabel,
|
||||
ProductAttribute, ProductNutrition, ProductAllergen, ProductLike
|
||||
|
||||
)
|
||||
from sqlalchemy import func, case
|
||||
|
||||
|
||||
# ---------- helpers ----------
|
||||
def _regular_price_of(p: Product) -> Optional[float]:
|
||||
try:
|
||||
return (
|
||||
float(p.regular_price)
|
||||
if p.regular_price is not None
|
||||
else (
|
||||
float(p.special_price)
|
||||
if p.special_price is not None
|
||||
else None
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
# ---------- NAV ----------
|
||||
async def db_nav(session) -> Dict:
    """Build the navigation tree from NavTop/NavSub rows.

    Returns ``{"cats": {top_label: {"label", "name", "slug", "subs": [...]}}}``
    with subs sorted case-insensitively by display name.

    NOTE(review): neither query filters on ``deleted_at`` here, unlike the
    product queries in this module — confirm whether soft-deleted nav rows
    should be excluded.
    """
    tops = (await session.execute(select(NavTop))).scalars().all()
    subs = (await session.execute(select(NavSub))).scalars().all()

    # Group subcategories under their parent top-level id.
    subs_by_top: Dict[int, List[Dict]] = {}
    for s in subs:
        sub_name = (s.label or s.slug or "").strip()
        subs_by_top.setdefault(s.top_id, []).append({
            "label": s.label,
            "name": sub_name,  # back-compat for callers expecting "name"
            "slug": s.slug,
            "href": s.href,
        })

    cats: Dict[str, Dict] = {}
    for t in tops:
        top_label = (t.label or t.slug or "").strip()
        cats[top_label] = {
            "label": t.label,
            "name": top_label,  # back-compat
            "slug": t.slug,
            "subs": sorted(subs_by_top.get(t.id, []), key=lambda x: (x["name"] or "").lower()),
        }
    return {"cats": cats}
|
||||
|
||||
|
||||
async def db_product_full(session, slug: str, user_id=0) -> Optional[dict]:
    """Load one non-deleted product by *slug* and serialize it to a plain dict.

    All child collections (images, sections, labels, stickers, attributes,
    nutrition, allergens) are eager-loaded with soft-deleted rows excluded.
    ``is_liked`` reflects whether *user_id* has an active (non-deleted) like
    for this slug.  Returns ``None`` when no matching product exists.
    """
    # Slugs actively liked by this user (soft-deleted likes excluded).
    liked_product_ids_subq = (
        select(ProductLike.product_slug)
        .where(
            and_(
                ProductLike.user_id == user_id,
                ProductLike.deleted_at.is_(None)
            )
        )
    )

    # Boolean column: liked by this user AND the product itself is not deleted.
    is_liked_case = case(
        (and_(
            (Product.slug.in_(liked_product_ids_subq)),
            Product.deleted_at.is_(None)
        ), True),
        else_=False
    ).label("is_liked")

    q = (
        select(Product, is_liked_case)
        .where(Product.slug == slug, Product.deleted_at.is_(None))
        .options(
            # and_() on the relationship restricts each eager load to
            # non-deleted child rows.
            selectinload(Product.images.and_(ProductImage.deleted_at.is_(None))),
            selectinload(Product.sections.and_(ProductSection.deleted_at.is_(None))),
            selectinload(Product.labels.and_(ProductLabel.deleted_at.is_(None))),
            selectinload(Product.stickers.and_(ProductSticker.deleted_at.is_(None))),
            selectinload(Product.attributes.and_(ProductAttribute.deleted_at.is_(None))),
            selectinload(Product.nutrition.and_(ProductNutrition.deleted_at.is_(None))),
            selectinload(Product.allergens.and_(ProductAllergen.deleted_at.is_(None))),
        )
    )
    result = await session.execute(q)

    # NOTE(review): execute() does not return None; the guard is defensive.
    row = result.first() if result is not None else None
    p, is_liked = row if row else (None, None)
    if not p:
        return None

    # Images are partitioned by their ``kind`` discriminator; a missing kind
    # defaults to "gallery" for the gallery bucket only.
    gallery = [
        img.url
        for img in sorted(p.images, key=lambda i: (i.kind or "gallery", i.position or 0))
        if (img.kind or "gallery") == "gallery"
    ]
    embedded = [
        img.url
        for img in sorted(p.images, key=lambda i: i.position or 0)
        if (img.kind or "") == "embedded"
    ]
    all_imgs = [
        img.url
        for img in sorted(p.images, key=lambda i: i.position or 0)
        if (img.kind or "") == "all"
    ]
    return {
        "id": p.id,
        "slug": p.slug,
        "title": p.title,
        "brand": p.brand,
        "image": p.image,
        "description_short": p.description_short,
        "description_html": p.description_html,
        "suma_href": p.suma_href,
        # Decimal columns are coerced to float for JSON friendliness.
        "rrp": float(p.rrp) if p.rrp is not None else None,
        "special_price": float(p.special_price) if p.special_price is not None else None,
        "special_price_raw": p.special_price_raw,
        "special_price_currency": p.special_price_currency,
        "regular_price": _regular_price_of(p),
        "regular_price_raw": p.regular_price_raw,
        "regular_price_currency": p.regular_price_currency,
        "rrp_raw": p.rrp_raw,
        "rrp_currency": p.rrp_currency,
        "price_per_unit_raw": p.price_per_unit_raw,
        "price_per_unit": p.price_per_unit,
        "price_per_unit_currency": p.price_per_unit_currency,
        "oe_list_price": p.oe_list_price,
        "images": gallery,
        "embedded_image_urls": embedded,
        "all_image_urls": all_imgs,
        "sections": [{"title": s.title, "html": s.html} for s in p.sections],
        # Stickers are normalized to lowercase; labels keep their original case.
        "stickers": [v.name.strip().lower() for v in p.stickers if v.name],
        "labels": [v.name for v in p.labels if v.name],
        "ean": p.ean,
        "sku": p.sku,
        "unit_size": p.unit_size,
        "pack_size": p.pack_size,
        "case_size_raw": p.case_size_raw,
        "case_size_count": p.case_size_count,
        "case_size_item_qty": p.case_size_item_qty,
        "case_size_item_unit": p.case_size_item_unit,
        "info_table": {a.key: a.value for a in p.attributes if a.key},
        "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in p.nutrition if n.key],
        "allergens": [{"name": a.name, "contains": a.contains} for a in p.allergens if a.name],
        "is_liked": is_liked,
        "deleted_at": p.deleted_at
    }
|
||||
|
||||
|
||||
async def db_product_full_id(session, id:int, user_id=0) -> Optional[dict]:
    """Load one product by primary key and serialize it like ``db_product_full``.

    NOTE(review): unlike the slug variant, this query does NOT exclude
    soft-deleted products (no ``Product.deleted_at`` guard on the WHERE or in
    ``is_liked_case``) — confirm whether admin/internal callers rely on that.
    NOTE(review): parameter ``id`` shadows the builtin.
    """
    # Slugs actively liked by this user (soft-deleted likes excluded).
    liked_product_ids_subq = (
        select(ProductLike.product_slug)
        .where(
            and_(
                ProductLike.user_id == user_id,
                ProductLike.deleted_at.is_(None)
            )
        )
    )

    is_liked_case = case(
        (
            (Product.slug.in_(liked_product_ids_subq)),
            True
        ),
        else_=False
    ).label("is_liked")

    q = (
        select(Product, is_liked_case)
        .where(Product.id == id)
        .options(
            # Eager-load children, excluding soft-deleted rows.
            selectinload(Product.images.and_(ProductImage.deleted_at.is_(None))),
            selectinload(Product.sections.and_(ProductSection.deleted_at.is_(None))),
            selectinload(Product.labels.and_(ProductLabel.deleted_at.is_(None))),
            selectinload(Product.stickers.and_(ProductSticker.deleted_at.is_(None))),
            selectinload(Product.attributes.and_(ProductAttribute.deleted_at.is_(None))),
            selectinload(Product.nutrition.and_(ProductNutrition.deleted_at.is_(None))),
            selectinload(Product.allergens.and_(ProductAllergen.deleted_at.is_(None))),
        )
    )
    result = await session.execute(q)

    # NOTE(review): execute() does not return None; the guard is defensive.
    row = result.first() if result is not None else None
    p, is_liked = row if row else (None, None)
    if not p:
        return None

    # Partition images by kind, as in db_product_full.
    gallery = [
        img.url
        for img in sorted(p.images, key=lambda i: (i.kind or "gallery", i.position or 0))
        if (img.kind or "gallery") == "gallery"
    ]
    embedded = [
        img.url
        for img in sorted(p.images, key=lambda i: i.position or 0)
        if (img.kind or "") == "embedded"
    ]
    all_imgs = [
        img.url
        for img in sorted(p.images, key=lambda i: i.position or 0)
        if (img.kind or "") == "all"
    ]
    return {
        "id": p.id,
        "slug": p.slug,
        "title": p.title,
        "brand": p.brand,
        "image": p.image,
        "description_short": p.description_short,
        "description_html": p.description_html,
        "suma_href": p.suma_href,
        "rrp": float(p.rrp) if p.rrp is not None else None,
        "special_price": float(p.special_price) if p.special_price is not None else None,
        "special_price_raw": p.special_price_raw,
        "special_price_currency": p.special_price_currency,
        "regular_price": _regular_price_of(p),
        "regular_price_raw": p.regular_price_raw,
        "regular_price_currency": p.regular_price_currency,
        "rrp_raw": p.rrp_raw,
        "rrp_currency": p.rrp_currency,
        "price_per_unit_raw": p.price_per_unit_raw,
        "price_per_unit": p.price_per_unit,
        "price_per_unit_currency": p.price_per_unit_currency,
        "oe_list_price": p.oe_list_price,
        "images": gallery,
        "embedded_image_urls": embedded,
        "all_image_urls": all_imgs,
        "sections": [{"title": s.title, "html": s.html} for s in p.sections],
        "stickers": [v.name.strip().lower() for v in p.stickers if v.name],
        "labels": [v.name for v in p.labels if v.name],
        "ean": p.ean,
        "sku": p.sku,
        "unit_size": p.unit_size,
        "pack_size": p.pack_size,
        "case_size_raw": p.case_size_raw,
        "case_size_count": p.case_size_count,
        "case_size_item_qty": p.case_size_item_qty,
        "case_size_item_unit": p.case_size_item_unit,
        "info_table": {a.key: a.value for a in p.attributes if a.key},
        "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in p.nutrition if n.key],
        "allergens": [{"name": a.name, "contains": a.contains} for a in p.allergens if a.name],
        "is_liked": is_liked,
        "deleted_at": p.deleted_at
    }
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# ---------- PRODUCTS LISTING ----------
|
||||
|
||||
async def db_products_nocounts(
    session,
    top_slug: str | None,
    sub_slug: str | None,
    selected_brands: Optional[List[str]] = None,
    selected_stickers: Optional[List[str]] = None,
    selected_labels: Optional[List[str]] = None,
    page: int = 1,
    search: Optional[str] = None,
    sort: Optional[str] = None,
    page_size: int = 20,
    liked: bool = None,
    user_id: int=0
) -> Dict:
    """Return one page of serialized products for a category listing.

    Scope: products in the listing identified by *top_slug*/*sub_slug*
    (all products when *top_slug* is falsy), minus blacklisted slugs and
    soft-deleted rows.  Filters: brand / sticker / label (case-insensitive),
    substring *search* over ``description_short``, and optionally only the
    caller's liked products.  Results are sorted per *sort* and paged.

    Returns ``{"total_pages": int, "items": [dict, ...]}``; facet counts are
    produced separately by ``db_products_counts``.
    """
    BLOCKED_SLUGS = set((config().get("blacklist", {}).get("product", []) or []))
    base_conditions = []
    if BLOCKED_SLUGS:
        base_conditions.append(
            ~Product.slug.in_(BLOCKED_SLUGS),
        )

    if top_slug:
        # Resolve the listing row for this top/sub pair; a missing sub_slug
        # means the top-level listing (Listing.sub_id IS NULL).
        q_list = (
            select(Listing.id)
            .join(NavTop, Listing.top)
            .outerjoin(NavSub, Listing.sub)
            .where(
                Listing.deleted_at.is_(None),
                NavTop.deleted_at.is_(None),
                NavTop.slug == top_slug,
                NavSub.deleted_at.is_(None),
                NavSub.slug == sub_slug if sub_slug else Listing.sub_id.is_(None),
            )
        )

        listing_id = (await session.execute(q_list)).scalars().first()
        if not listing_id:
            return {"total_pages": 1, "items": []}

        base_conditions.append(Product.slug.in_(
            select(ListingItem.slug).where(ListingItem.listing_id == listing_id, ListingItem.deleted_at.is_(None))
        ))

    base_ids_subq = select(Product.id).where(*base_conditions, Product.deleted_at.is_(None))
    base_ids = (await session.execute(base_ids_subq)).scalars().all()

    if not base_ids:
        return {"total_pages": 1, "items": []}

    # Normalize selections to lowercase, dropping empties.
    sel_brands = [(b or "").strip().lower() for b in (selected_brands or []) if (b or "").strip()]
    sel_stickers = [(s or "").strip().lower() for s in (selected_stickers or []) if (s or "").strip()]
    sel_labels = [(l or "").strip().lower() for l in (selected_labels or []) if (l or "").strip()]
    search_q = (search or "").strip().lower()

    filter_conditions = []
    if sel_brands:
        filter_conditions.append(func.lower(Product.brand).in_(sel_brands))
    # One EXISTS per selected sticker/label: the product must carry ALL of them.
    for sticker_name in sel_stickers:
        filter_conditions.append(
            Product.stickers.any(
                and_(
                    func.lower(ProductSticker.name) == sticker_name,
                    ProductSticker.deleted_at.is_(None)
                )
            )
        )
    for label_name in sel_labels:
        filter_conditions.append(
            Product.labels.any(
                and_(
                    func.lower(ProductLabel.name) == label_name,
                    ProductLabel.deleted_at.is_(None),
                )
            )
        )
    if search_q:
        filter_conditions.append(func.lower(Product.description_short).contains(search_q))
    if liked:
        # Fix: was a redundant chained assignment (`liked_subq = liked_subq = ...`).
        liked_subq = (
            select(ProductLike.product_slug)
            .where(
                and_(
                    ProductLike.user_id == user_id,
                    ProductLike.deleted_at.is_(None)
                )
            )
            .subquery()
        )
        filter_conditions.append(Product.slug.in_(liked_subq))

    filtered_count_query = select(func.count(Product.id)).where(Product.id.in_(base_ids), *filter_conditions)
    total_filtered = (await session.execute(filtered_count_query)).scalars().one()
    total_pages = max(1, (total_filtered + page_size - 1) // page_size)
    page = max(1, page)

    # Per-row liked flag for this user (independent of the `liked` filter).
    liked_product_slugs_subq = (
        select(ProductLike.product_slug)
        .where(
            and_(
                ProductLike.user_id == user_id,
                ProductLike.deleted_at.is_(None)
            )
        )
    )
    is_liked_case = case(
        (Product.slug.in_(liked_product_slugs_subq), True),
        else_=False
    ).label("is_liked")

    q_filtered = select(Product, is_liked_case).where(Product.id.in_(base_ids), *filter_conditions).options(
        selectinload(Product.images),
        selectinload(Product.sections),
        selectinload(Product.labels),
        selectinload(Product.stickers),
        selectinload(Product.attributes),
        selectinload(Product.nutrition),
        selectinload(Product.allergens),
    )

    # Ordering: title/slug tiebreak everywhere; NULL prices sort last.
    sort_mode = (sort or "az").strip().lower()
    if sort_mode == "az":
        q_filtered = q_filtered.order_by(func.lower(Product.title), Product.slug)
    elif sort_mode == "za":
        q_filtered = q_filtered.order_by(func.lower(Product.title).desc(), Product.slug.desc())
    elif sort_mode in ("price-asc", "price_asc", "price-low", "price-low-high", "low-high", "lo-hi"):
        q_filtered = q_filtered.order_by(
            case((Product.regular_price.is_(None), 1), else_=0),
            Product.regular_price.asc(),
            func.lower(Product.title),
            Product.slug
        )
    elif sort_mode in ("price-desc", "price_desc", "price-high", "price-high-low", "high-low", "hi-lo"):
        q_filtered = q_filtered.order_by(
            case((Product.regular_price.is_(None), 1), else_=0),
            Product.regular_price.desc(),
            func.lower(Product.title),
            Product.slug
        )
    else:
        q_filtered = q_filtered.order_by(func.lower(Product.title), Product.slug)

    offset_val = (page - 1) * page_size
    q_filtered = q_filtered.offset(offset_val).limit(page_size)
    products_page = (await session.execute(q_filtered)).all()

    items: List[Dict] = []
    for p, is_liked in products_page:
        # Partition images by kind; missing kind defaults to "gallery" for
        # the gallery bucket only.
        gallery_imgs = sorted((img for img in p.images), key=lambda i: (i.kind or "gallery", i.position or 0))
        gallery = [img.url for img in gallery_imgs if (img.kind or "gallery") == "gallery"]
        embedded = [img.url for img in sorted(p.images, key=lambda i: i.position or 0) if (img.kind or "") == "embedded"]
        all_imgs = [img.url for img in sorted(p.images, key=lambda i: i.position or 0) if (img.kind or "") == "all"]

        items.append({
            "slug": p.slug,
            "title": p.title,
            "brand": p.brand,
            "description_short": p.description_short,
            "description_html": p.description_html,
            "image": p.image,
            "rrp": float(p.rrp) if p.rrp is not None else None,
            "special_price": float(p.special_price) if p.special_price is not None else None,
            "special_price_raw": p.special_price_raw,
            "special_price_currency": p.special_price_currency,
            "regular_price": _regular_price_of(p),
            "regular_price_raw": p.regular_price_raw,
            "regular_price_currency": p.regular_price_currency,
            "rrp_raw": p.rrp_raw,
            "rrp_currency": p.rrp_currency,
            "price_per_unit_raw": p.price_per_unit_raw,
            "price_per_unit": p.price_per_unit,
            "price_per_unit_currency": p.price_per_unit_currency,
            "images": gallery,
            "embedded_image_urls": embedded,
            "all_image_urls": all_imgs,
            "sections": [{"title": s.title, "html": s.html} for s in p.sections],
            "labels": [l.name for l in p.labels if l.name],
            "stickers": [s.name.strip().lower() for s in p.stickers if s.name],
            "info_table": {a.key: a.value for a in p.attributes if a.key},
            "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in p.nutrition if n.key],
            "allergens": [{"name": a.name, "contains": a.contains} for a in p.allergens if a.name],
            "ean": p.ean,
            "sku": p.sku,
            "unit_size": p.unit_size,
            "pack_size": p.pack_size,
            "is_liked": is_liked,
        })

    return {
        "total_pages": total_pages,
        "items": items,
    }
|
||||
|
||||
|
||||
async def db_products_counts(
    session,
    top_slug: str | None,
    sub_slug: str | None,
    search: Optional[str] = None,
    user_id: int=0
) -> Dict:
    """Compute facet counts for a category listing (independent of filters).

    Returns ``{"brands", "stickers", "labels", "liked_count", "search_count"}``
    where each facet list is ``[{"name", "count"}, ...]`` sorted by count
    descending then name.  Scope matches ``db_products_nocounts``: the
    listing's products, minus blacklisted slugs and soft-deleted rows.

    Fix: the previous version issued an extra ``liked_count`` query whose
    result was immediately overwritten by the query below — that dead
    database roundtrip (and its unused subquery) has been removed.
    """
    BLOCKED_SLUGS = set((config().get("blacklist", {}).get("product", []) or []))
    base_conditions = []

    if top_slug:
        # Resolve the listing for this top/sub pair; missing sub means the
        # top-level listing (sub_id IS NULL).
        q_list = select(Listing.id).where(
            Listing.deleted_at.is_(None),
            Listing.top.has(slug=top_slug),
            Listing.sub.has(slug=sub_slug) if sub_slug else Listing.sub_id.is_(None),
        )
        listing_id = (await session.execute(q_list)).scalars().first()
        if not listing_id:
            return {
                "brands": [],
                "stickers": [],
                "labels": [],
                "liked_count": 0,
                "search_count": 0,
            }

        listing_slug_subquery = select(ListingItem.slug).where(ListingItem.listing_id == listing_id, ListingItem.deleted_at.is_(None))

        if BLOCKED_SLUGS:
            base_conditions.append(
                and_(
                    Product.slug.in_(listing_slug_subquery),
                    ~Product.slug.in_(BLOCKED_SLUGS),
                )
            )
        else:
            base_conditions.append(Product.slug.in_(listing_slug_subquery))
    else:
        if BLOCKED_SLUGS:
            base_conditions.append(~Product.slug.in_(BLOCKED_SLUGS))

    base_ids = (await session.execute(select(Product.id).where(*base_conditions, Product.deleted_at.is_(None)))).scalars().all()
    if base_ids:
        # NOTE(review): this ids -> slugs -> ids round-trip re-derives the
        # same set in two extra queries (assuming slugs are unique).  Kept
        # as-is pending confirmation that slug uniqueness is guaranteed.
        base_products_slugs = (await session.execute(
            select(Product.slug).where(Product.id.in_(base_ids), Product.deleted_at.is_(None))
        )).scalars().all()
        if not base_products_slugs:
            return {
                "brands": [],
                "stickers": [],
                "labels": [],
                "liked_count": 0,
                "search_count": 0,
            }
        base_ids = (await session.execute(
            select(Product.id).where(Product.slug.in_(base_products_slugs), Product.deleted_at.is_(None))
        )).scalars().all()
    else:
        return {
            "brands": [],
            "stickers": [],
            "labels": [],
            "liked_count": 0,
            "search_count": 0,
        }

    brands_list: List[Dict] = []
    stickers_list: List[Dict] = []
    labels_list: List[Dict] = []
    liked_count = 0
    search_count = 0

    # Liked count: active likes by this user over the in-scope products.
    # Anonymous users (user_id falsy) always get 0.
    liked_count = (await session.execute(
        select(func.count())
        .select_from(ProductLike)
        .where(
            ProductLike.user_id == user_id,
            ProductLike.product_slug.in_(
                select(Product.slug).where(Product.id.in_(base_ids))
            ),
            ProductLike.deleted_at.is_(None)
        )
    )).scalar_one() if user_id else 0

    # Brand counts (blank/NULL brands excluded).
    brand_count_rows = await session.execute(
        select(Product.brand, func.count(Product.id))
        .where(Product.id.in_(base_ids),
               Product.brand.is_not(None),
               func.trim(Product.brand) != "",
               Product.deleted_at.is_(None)
               )
        .group_by(Product.brand)
    )
    for brand_name, count in brand_count_rows:
        brands_list.append({"name": brand_name, "count": count})
    brands_list.sort(key=lambda x: (-x["count"], x["name"].lower()))

    # Sticker counts (names normalized to lowercase, matching item payloads).
    sticker_count_rows = await session.execute(
        select(ProductSticker.name, func.count(ProductSticker.product_id))
        .where(
            ProductSticker.product_id.in_(base_ids),
            ProductSticker.deleted_at.is_(None)
        )
        .group_by(ProductSticker.name)
    )
    for sticker_name, count in sticker_count_rows:
        if sticker_name:
            stickers_list.append({"name": sticker_name.strip().lower(), "count": count})
    stickers_list.sort(key=lambda x: (-x["count"], x["name"]))

    # Label counts (original case preserved, matching item payloads).
    label_count_rows = await session.execute(
        select(ProductLabel.name, func.count(ProductLabel.product_id))
        .where(
            ProductLabel.product_id.in_(base_ids),
            ProductLabel.deleted_at.is_(None)
        )
        .group_by(ProductLabel.name)
    )
    for label_name, count in label_count_rows:
        if label_name:
            labels_list.append({"name": label_name, "count": count})
    labels_list.sort(key=lambda x: (-x["count"], x["name"]))

    # Search count: substring match over description_short; with no search
    # term, every in-scope product matches.
    search_q = (search or "").strip().lower()
    if search_q:
        search_count = (await session.execute(
            select(func.count(Product.id))
            .where(
                Product.id.in_(base_ids),
                func.lower(Product.description_short).contains(search_q),
                Product.deleted_at.is_(None)
            )
        )).scalars().one()
    else:
        search_count = len(base_ids)

    return {
        "brands": brands_list,
        "stickers": stickers_list,
        "labels": labels_list,
        "liked_count": liked_count,
        "search_count": search_count,
    }
|
||||
|
||||
async def db_products(
    session,
    top_slug: str | None,
    sub_slug: str | None,
    selected_brands: Optional[List[str]] = None,
    selected_stickers: Optional[List[str]] = None,
    selected_labels: Optional[List[str]] = None,
    page: int = 1,
    search: Optional[str] = None,
    sort: Optional[str] = None,
    page_size: int = 20,
    liked: bool = None,
    user_id: int=0
) -> Dict:
    """Full listing payload: one page of items plus facet counts.

    Delegates to ``db_products_nocounts`` (items + total_pages) and
    ``db_products_counts`` (brand/sticker/label facets, liked/search counts),
    then merges the two dicts; count keys win on any collision.
    """
    listing_part = await db_products_nocounts(
        session,
        top_slug=top_slug,
        sub_slug=sub_slug,
        selected_brands=selected_brands,
        selected_stickers=selected_stickers,
        selected_labels=selected_labels,
        page=page,
        search=search,
        sort=sort,
        page_size=page_size,
        liked=liked,
        user_id=user_id,
    )
    counts_part = await db_products_counts(
        session,
        top_slug=top_slug,
        sub_slug=sub_slug,
        search=search,
        user_id=user_id,
    )
    merged = dict(listing_part)
    merged.update(counts_part)
    return merged
|
||||
|
||||
|
||||
163
bp/browse/services/nav.py
Normal file
163
bp/browse/services/nav.py
Normal file
@@ -0,0 +1,163 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
import re
|
||||
from typing import Dict, List, Tuple, Optional
|
||||
from urllib.parse import urlparse, urljoin
|
||||
|
||||
from config import config
|
||||
from . import db_backend as cb
|
||||
from .blacklist.category import is_category_blocked # Reverse map: slug -> label
|
||||
|
||||
# ------------------ Caches ------------------
# NOTE(review): get_nav() writes these after every build but never reads them
# back, so _nav_ttl_seconds is currently unused — confirm whether read-through
# caching was intended.
_nav_cache: Dict = {}  # last nav structure produced by get_nav()
_nav_cache_ts: float = 0.0  # epoch seconds of the last cache write
_nav_ttl_seconds = 60 * 60 * 6 # 6 hours
|
||||
|
||||
|
||||
def _now() -> float:
|
||||
try:
|
||||
return now() # type: ignore[name-defined]
|
||||
except Exception:
|
||||
return time.time()
|
||||
|
||||
|
||||
def extract_sub_slug(href: str, top_slug: str) -> Optional[str]:
    """Extract the subcategory slug from *href* under parent *top_slug*.

    The href's path must look like ``/<top_slug>/<sub>...`` (the top segment
    is matched case-insensitively).  A trailing ``.html``/``.htm`` extension
    is stripped from the sub segment regardless of case.  Returns ``None``
    when the path does not start with *top_slug* or has no second segment.
    """
    p = urlparse(href)
    parts = [x for x in (p.path or "").split("/") if x]
    if len(parts) >= 2 and parts[0].lower() == top_slug.lower():
        sub = parts[1]
        if sub.lower().endswith((".html", ".htm")):
            # Fix: the old pattern r"\.(html?|HTML?)$" missed mixed-case
            # extensions such as ".Html" even though the endswith() check
            # above accepted them; IGNORECASE covers every casing.
            sub = re.sub(r"\.html?$", "", sub, flags=re.IGNORECASE)
        return sub
    return None
|
||||
|
||||
|
||||
def group_by_category(slug_to_links: Dict[str, List[Tuple[str, str]]]) -> Dict[str, Dict]:
    """Build a nav tree from scraped links, keyed by allowed category label.

    *slug_to_links* maps a top-level slug to ``(text, href)`` pairs; each href
    that parses to a subcategory of that slug becomes a sub entry.  The result
    is passed through the category blacklist before returning.
    """
    nav = {"cats": {}}
    # Only categories present in the config allow-list are considered.
    for label, slug in config()["categories"]["allow"].items():
        top_href = urljoin(config()["base_url"], f"/{slug}")
        subs = []
        for text, href in slug_to_links.get(slug, []):
            sub_slug = extract_sub_slug(href, slug)
            if sub_slug:
                subs.append({
                    "name": text,
                    "href": href,
                    "slug": sub_slug,
                    # no count here yet in this path
                })
        subs.sort(key=lambda x: x["name"].lower())
        nav["cats"][label] = {"href": top_href, "slug": slug, "subs": subs}
    nav = _apply_category_blacklist(nav)
    return nav
|
||||
|
||||
|
||||
async def get_nav(session) -> Dict[str, Dict]:
    """
    Return navigation structure; annotate each sub with product counts.

    Uses snapshot for offline behaviour.

    NOTE(review): the module-level cache is written here but never consulted,
    so every call rebuilds the nav from the database.  The per-sub count
    injection is also currently disabled (commented out below), leaving the
    inner loop as a shallow copy of each sub dict.
    """
    global _nav_cache, _nav_cache_ts
    now_ts = _now()

    # load from snapshot
    nav = await cb.db_nav(session)

    # inject counts for each subcategory (and for top-level too if you like)
    for label, cat in (nav.get("cats") or {}).items():
        top_slug = cat.get("slug")
        if not top_slug:
            continue


        # Counts for subs
        new_subs = []
        for s in cat.get("subs", []):
            # NOTE(review): the result of this lookup is discarded — it is a
            # leftover from the disabled count logic below.
            s.get("slug")
            #if not sub_slug:
            #    s_count = 0
            #else:
            #    s_count = await cb.db_count_products_in_sub(session,top_slug, sub_slug)
            #print('sub', s_count)
            new_subs.append({
                **s,
                #"count": s_count,
            })
        cat["subs"] = new_subs

    # Cache the freshly built nav (write-only at present; see NOTE above).
    _nav_cache = nav
    _nav_cache_ts = now_ts

    nav = _apply_category_blacklist(nav)
    return nav
|
||||
|
||||
|
||||
def category_context(top_slug: Optional[str], sub_slug: Optional[str], nav: Dict[str, Dict]):
    """Build template context for a category/subcategory page.

    Resolves the category label from the config allow-list (reverse lookup by
    slug), then assembles hrefs, counts, and the subcategory list with the
    selected sub moved to the front.
    """
    def _order_subs_selected_first(subs, sub_slug: str | None):
        """Return subs with the selected subcategory (by slug) first.

        NOTE(review): compares ``sub_slug.lower()`` against the raw stored
        slug — assumes stored slugs are already lowercase; confirm.
        """
        if not subs or not sub_slug:
            return subs
        head = [s for s in subs if sub_slug and sub_slug.lower() == s['slug']]
        tail = [s for s in subs if not (sub_slug and sub_slug.lower() == s['slug'])]
        return head + tail

    # Reverse map: slug -> label, from the config allow-list.
    REVERSE_CATEGORY = {v: k for k, v in config()["categories"]["allow"].items()}
    label = REVERSE_CATEGORY.get(top_slug)
    cat = nav["cats"].get(label) or {}

    top_suma_href = cat.get("href") or urljoin(config()["base_url"], f"/{top_slug}")
    top_local_href = f"{top_slug}"

    # total products in this top-level category (all subs combined / top-level listing)
    top_count = cat.get("count", 0)

    subs = []
    for s in cat.get("subs", []):
        subs.append({
            "name": s["name"],
            "slug": s.get("slug"),
            "local_href": f"{top_slug}/{s.get('slug')}",
            "suma_href": s["href"],
            "count": s.get("count", 0),  # per-subcategory product count
        })

    # NOTE(review): computed but excluded from the returned context (the key
    # is commented out below) — dead unless re-enabled.
    current_local_href = (
        f"{top_slug}/{sub_slug}" if sub_slug
        else f"{top_slug}" if top_slug
        else ""
    )

    return {
        "category_label": label,
        "top_slug": top_slug,
        "sub_slug": sub_slug,
        "top_suma_href": top_suma_href,
        "top_local_href": top_local_href,

        # 👇 expose total count for the parent category
        "top_count": top_count,

        # list of subcategories, each with its own count
        "subs_local": _order_subs_selected_first(subs, sub_slug),

        #"current_local_href": current_local_href,
    }
|
||||
|
||||
def _apply_category_blacklist(nav: Dict[str, Dict]) -> Dict[str, Dict]:
    """Return a copy of *nav* without blocked categories.

    Top-level entries with no slug or a blocked slug are dropped entirely;
    within each surviving category, subcategories whose slug is blocked (or
    missing) are filtered out.  All other keys (counts, hrefs) are preserved.
    """
    result: Dict[str, Dict] = {}
    for label, data in nav.get("cats", {}).items():
        top_slug = (data or {}).get("slug")
        if not top_slug or is_category_blocked(top_slug):
            continue
        kept_subs = [
            sub for sub in (data.get("subs") or [])
            if sub.get("slug") and not is_category_blocked(top_slug, sub.get("slug"))
        ]
        # Shallow-merge so counts and other metadata survive.
        result[label] = {**data, "subs": kept_subs}
    return {"cats": result}
|
||||
118
bp/browse/services/products.py
Normal file
118
bp/browse/services/products.py
Normal file
@@ -0,0 +1,118 @@
|
||||
# products.py
|
||||
from __future__ import annotations
|
||||
from typing import List, Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .state import KNOWN_PRODUCT_SLUGS
|
||||
from .blacklist.category import is_category_blocked
|
||||
from . import db_backend as cb
|
||||
|
||||
# NEW IMPORT:
|
||||
from quart import g
|
||||
|
||||
async def products(
    list_url: str,
    selected_brands: Optional[List[str]] = None,
    selected_stickers: Optional[List[str]] = None,
    selected_labels: Optional[List[str]] = None,
    page: int = 1,
    search: Optional[str] = None,
    sort: Optional[str] = None,
    liked: Optional[bool] = None,
    user_id: Optional[int] = None,
):
    """Fetch one page of products plus facet data for a listing URL.

    Parameters mirror the query-string filters; *user_id* scopes per-user
    data (likes).  Returns a 7-tuple:
        (items, brands, stickers, labels, total_pages, liked_count, search_count)
    """
    p = urlparse(list_url)
    parts = [x for x in (p.path or "").split("/") if x]
    top = parts[0] if parts else None
    sub = parts[1] if len(parts) >= 2 else None

    if is_category_blocked(top, sub):
        # BUGFIX: this used to return only 5 values while the normal path
        # below returns 7, so callers unpacking 7 values crashed with a
        # ValueError whenever a blocked category was requested.
        return [], [], [], [], 1, 0, 0

    # BUGFIX: honor the explicitly passed user_id (it was previously
    # accepted but ignored); fall back to the request user as before.
    effective_user_id = (
        user_id if user_id is not None else (g.user.id if g.user else 0)
    )

    data = await cb.db_products(
        g.s,
        top,
        sub,
        selected_brands,
        selected_stickers,
        selected_labels,
        page,
        search,
        sort,
        liked=liked,
        user_id=effective_user_id,
    )
    items = data.get("items", []) or []
    brands = data.get("brands", []) or []
    stickers = data.get("stickers", []) or []
    labels = data.get("labels", []) or []
    total_pages = int(data.get("total_pages", 1) or 1)

    # Track known product slugs so product URLs can be recognised elsewhere.
    for it in items:
        try:
            slug = it.get("slug")
            if slug:
                KNOWN_PRODUCT_SLUGS.add(slug)
        except Exception:
            pass

    return (
        items,
        brands,
        stickers,
        labels,
        total_pages,
        data.get("liked_count"),
        data.get("search_count"),
    )
|
||||
|
||||
|
||||
async def products_nocounts(
    session,
    list_url: str,
    selected_brands: Optional[List[str]] = None,
    selected_stickers: Optional[List[str]] = None,
    selected_labels: Optional[List[str]] = None,
    page: int = 1,
    search: Optional[str] = None,
    sort: Optional[str] = None,
    liked: Optional[bool] = None,
    user_id: Optional[int] = None,
):
    """Fetch one page of products WITHOUT facet counts (cheaper query).

    Used for HTMX pagination fragments where the facet sidebar is not
    re-rendered.  Returns a 2-tuple: (items, total_pages).
    """
    p = urlparse(list_url)
    parts = [x for x in (p.path or "").split("/") if x]
    top = parts[0] if parts else None
    sub = parts[1] if len(parts) >= 2 else None

    if is_category_blocked(top, sub):
        # BUGFIX: this used to return a 5-tuple, but the normal path below
        # returns only (items, total_pages) — callers unpacking 2 values
        # crashed on blocked categories.
        return [], 1

    # BUGFIX: honor the explicitly passed user_id (previously ignored in
    # favor of g.user); fall back to the request user as before.
    effective_user_id = (
        user_id if user_id is not None else (g.user.id if g.user else 0)
    )

    data = await cb.db_products_nocounts(
        session,
        top,
        sub,
        selected_brands,
        selected_stickers,
        selected_labels,
        page,
        search,
        sort,
        liked=liked,
        user_id=effective_user_id,
    )
    items = data.get("items", []) or []
    total_pages = int(data.get("total_pages", 1) or 1)

    # Track known product slugs so product URLs can be recognised elsewhere.
    for it in items:
        try:
            slug = it.get("slug")
            if slug:
                KNOWN_PRODUCT_SLUGS.add(slug)
        except Exception:
            pass

    return items, total_pages
|
||||
179
bp/browse/services/services.py
Normal file
179
bp/browse/services/services.py
Normal file
@@ -0,0 +1,179 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from quart import (
|
||||
g,
|
||||
request,
|
||||
)
|
||||
from config import config
|
||||
from .products import products, products_nocounts
|
||||
from .blacklist.product_details import is_blacklisted_heading
|
||||
|
||||
from utils import host_url
|
||||
|
||||
|
||||
from sqlalchemy import select
|
||||
from models import ProductLike
|
||||
from ...market.filters.qs import decode
|
||||
|
||||
|
||||
def _hx_fragment_request() -> bool:
    """True when the current request was issued by HTMX (HX-Request header)."""
    header_value = request.headers.get("HX-Request", "")
    return header_value.lower() == "true"
|
||||
|
||||
async def _productInfo(top_slug=None, sub_slug=None):
    """
    Shared query logic for home / category / subcategory pages.
    Pulls filters from qs.decode(), queries products(), and orders brands/stickers/etc.

    Returns a template-context dict.  Full requests (and page 1) include
    facet data (brands/stickers/labels and counts); later HTMX pagination
    fragments get a slimmer dict with just items and paging info.
    """

    # Current query-string filters for this request.
    q = decode()
    page, search, sort = q.page, q.search, q.sort
    selected_brands, selected_stickers, selected_labels = q.selected_brands, q.selected_stickers, q.selected_labels
    liked = q.liked

    # Build the listing URL that products() will parse back into
    # (top, sub) path segments.
    # NOTE(review): the top-only branch passes the bare slug rather than a
    # joined URL — products() only reads the path, so this appears to work,
    # but it is asymmetric with the two-slug branch; confirm intentional.
    if top_slug is not None and sub_slug is not None:
        list_url = urljoin(config()["base_url"], f"/{top_slug}/{sub_slug}")
    else:
        if top_slug is not None:
            list_url = top_slug
        else:
            list_url = ""
    # Full page loads (or the first page) need facet counts for the sidebar;
    # subsequent HTMX fragments skip the expensive count queries.
    if not _hx_fragment_request() or page==1:
        items, brands, stickers, labels, total_pages, liked_count, search_count = await products(
            list_url,
            selected_brands=selected_brands,
            selected_stickers=selected_stickers,
            selected_labels=selected_labels,
            page=page,
            search=search,
            sort=sort,
            user_id=g.user.id if g.user else None,
            liked = liked,
        )

        # Selected brands are surfaced first in the sidebar.
        brands_ordered = _order_brands_selected_first(brands, selected_brands)

        return {
            "products": items,
            "page": page,
            "search": search,
            "sort": sort,
            "total_pages": int(total_pages or 1),
            "brands": brands_ordered,
            "selected_brands": selected_brands,
            "stickers": stickers,
            "selected_stickers": selected_stickers,
            "labels": labels,
            "selected_labels": selected_labels,
            "liked": liked,
            "liked_count": liked_count,
            "search_count": search_count
        }
    else:
        # HTMX pagination fragment: items + paging only, no facet data.
        items, total_pages = await products_nocounts(
            g.s,
            list_url,
            selected_brands=selected_brands,
            selected_stickers=selected_stickers,
            selected_labels=selected_labels,
            page=page,
            search=search,
            sort=sort,
            user_id=g.user.id if g.user else None,
            liked = liked,
        )
        return {
            "products": items,
            "page": page,
            "search": search,
            "sort": sort,
            "total_pages": int(total_pages or 1),
        }
|
||||
|
||||
|
||||
def _order_brands_selected_first(brands, selected):
|
||||
"""Return brands with the selected brand(s) first."""
|
||||
if not brands or not selected:
|
||||
return brands
|
||||
sel = [(s or "").strip() for s in selected]
|
||||
head = [s for s in brands if (s.get("name") or "").strip() in sel]
|
||||
tail = [s for s in brands if (s.get("name") or "").strip() not in sel]
|
||||
return head + tail
|
||||
|
||||
|
||||
def _order_stickers_selected_first(
|
||||
stickers: list[dict], selected_stickers: list[str] | None
|
||||
):
|
||||
if not stickers or not selected_stickers:
|
||||
return stickers
|
||||
sel = [(s or "").strip().lower() for s in selected_stickers]
|
||||
head = [s for s in stickers if (s.get("name") or "").strip().lower() in sel]
|
||||
tail = [
|
||||
s
|
||||
for s in stickers
|
||||
if (s.get("name") or "").strip().lower() not in sel
|
||||
]
|
||||
return head + tail
|
||||
|
||||
|
||||
def _order_labels_selected_first(
|
||||
labels: list[dict], selected_labels: list[str] | None
|
||||
):
|
||||
if not labels or not selected_labels:
|
||||
return labels
|
||||
sel = [(s or "").strip().lower() for s in selected_labels]
|
||||
head = [s for s in labels if (s.get("name") or "").strip().lower() in sel]
|
||||
tail = [
|
||||
s
|
||||
for s in labels
|
||||
if (s.get("name") or "").strip().lower() not in sel
|
||||
]
|
||||
return head + tail
|
||||
|
||||
def _massage_product(d):
    """
    Normalise the product dict for templates:
    - inject the app root into embedded HTML (replacing the
      [**__APP_ROOT__**] placeholder)
    - drop sections whose heading is blacklisted
    """
    placeholder = "[**__APP_ROOT__**]"

    kept_sections = []
    for section in d["sections"]:
        if is_blacklisted_heading(section["title"]):
            continue
        kept_sections.append(
            {**section, "html": section["html"].replace(placeholder, g.root)}
        )

    return {
        **d,
        "description_html": d["description_html"].replace(placeholder, g.root),
        "sections": kept_sections,
    }
|
||||
|
||||
|
||||
# Re-export from canonical shared location
|
||||
from shared.http_utils import vary as _vary, current_url_without_page as _current_url_without_page
|
||||
|
||||
async def _is_liked(user_id: int | None, slug: str) -> bool:
    """
    Check if this user has liked this product.

    Anonymous users (falsy user_id) are never "liked".
    """
    if not user_id:
        return False
    # ProductLike has a composite PK (user_id, product_slug), so this
    # filter identifies at most one row.
    result = await g.s.execute(
        select(ProductLike).where(
            ProductLike.user_id == user_id,
            ProductLike.product_slug == slug,
        )
    )
    # BUGFIX: the old code discarded scalar_one_or_none()'s return value and
    # tested the Result object itself (`return row is not None`), which is
    # never None — so every product was reported as liked.  Test the fetched
    # ORM row instead.
    like_row = result.scalar_one_or_none()
    return like_row is not None
|
||||
|
||||
|
||||
24
bp/browse/services/slugs.py
Normal file
24
bp/browse/services/slugs.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import re
|
||||
from urllib.parse import urljoin, urlparse
|
||||
from config import config
|
||||
|
||||
def product_slug_from_href(href: str) -> str:
    """Derive the canonical '<name>-html' product slug from a product URL.

    Takes the last path segment, strips any .html/.htm extension and any
    trailing -html/-htm markers, then appends a single '-html' suffix.
    Returns "" when the URL has no path segments.
    """
    segments = [seg for seg in urlparse(href).path.split("/") if seg]
    if not segments:
        return ""
    tail = segments[-1]
    # Drop a literal file extension first ...
    for ext in (".html", ".htm"):
        if tail.endswith(ext):
            tail = tail[: -len(ext)]
            break
    # ... then any trailing -html / -htm markers already baked into the name.
    tail = re.sub(r"-(html|htm)+$", "", tail, flags=re.I)
    return f"{tail}-html"
|
||||
|
||||
def canonical_html_slug(slug: str) -> str:
    """Normalise *slug* so it carries exactly one trailing '-html' marker."""
    stem = re.sub(r"-(html|htm)+$", "", slug, flags=re.I)
    return stem + "-html"
|
||||
|
||||
def suma_href_from_html_slug(slug: str) -> str:
    """Build the absolute upstream (Suma) URL for a product slug."""
    canonical = canonical_html_slug(slug)
    base = config()["base_url"]
    return urljoin(base, f"/{canonical}.html")
|
||||
21
bp/browse/services/state.py
Normal file
21
bp/browse/services/state.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from typing import Dict, Tuple, List
import time

# In-process caches shared by the browse services.
# NOTE(review): plain module-level dicts with no locking — assumes one
# event loop per process; confirm behaviour under multi-worker deployments.

# Navigation tree cache and the timestamp of its last refresh.
_nav_cache: dict = {}
_nav_cache_ts: float = 0.0
_nav_ttl_seconds = 60 * 60 * 6  # 6 hours

# Per-key product-detail cache, with a parallel dict of cache timestamps.
_detail_cache: Dict[str, Dict] = {}
_detail_cache_ts: Dict[str, float] = {}
_detail_ttl_seconds = 60 * 60 * 6  # 6 hours

# Product slugs observed so far (e.g. from listings), shared app-wide.
KNOWN_PRODUCT_SLUGS: set[str] = set()

# key -> (variant string, cached-at timestamp); presumably keyed by listing
# URL — confirm against callers.
_listing_variant_cache: Dict[str, Tuple[str, float]] = {}
_listing_variant_ttl = 60 * 60 * 6  # 6 hours

# key -> ((items, total_pages), cached-at timestamp).
_listing_page_cache: Dict[str, Tuple[Tuple[List[Dict], int], float]] = {}
_listing_page_ttl = 60 * 30  # 30 minutes


def now() -> float:
    """Current wall-clock time in seconds; used for the TTL checks above."""
    return time.time()
|
||||
7
bp/market/__init__.py
Normal file
7
bp/market/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
# create the blueprint at package import time
|
||||
from .routes import register # = Blueprint("browse_bp", __name__)
|
||||
|
||||
# import routes AFTER browse_bp is defined so routes can attach to it
|
||||
from . import routes # noqa: F401
|
||||
0
bp/market/admin/__init__.py
Normal file
0
bp/market/admin/__init__.py
Normal file
28
bp/market/admin/routes.py
Normal file
28
bp/market/admin/routes.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import (
|
||||
render_template, make_response, Blueprint
|
||||
)
|
||||
|
||||
|
||||
from suma_browser.app.authz import require_admin
|
||||
|
||||
|
||||
def register():
    """Create and return the market admin blueprint (mounted at /admin)."""
    bp = Blueprint("admin", __name__, url_prefix='/admin')

    # ---------- Pages ----------
    @bp.get("/")
    @require_admin
    async def admin():
        from suma_browser.app.utils.htmx import is_htmx_request

        # HTMX requests receive only the out-of-band fragments; a normal
        # browser navigation gets the full page with layout.
        template = (
            "_types/market/admin/_oob_elements.html"
            if is_htmx_request()
            else "_types/market/admin/index.html"
        )
        html = await render_template(template)
        return await make_response(html)

    return bp
|
||||
0
bp/market/filters/__init__.py
Normal file
0
bp/market/filters/__init__.py
Normal file
101
bp/market/filters/qs.py
Normal file
101
bp/market/filters/qs.py
Normal file
@@ -0,0 +1,101 @@
|
||||
from quart import request
|
||||
|
||||
from typing import Iterable, Optional, Union
|
||||
|
||||
from suma_browser.app.filters.qs_base import (
|
||||
KEEP, _norm, make_filter_set, build_qs,
|
||||
)
|
||||
from suma_browser.app.filters.query_types import MarketQuery
|
||||
|
||||
|
||||
def decode() -> MarketQuery:
    """Decode the current request's query string into a MarketQuery.

    Multi-valued filters are stripped of blanks; sticker/label values are
    additionally lowercased.
    """
    args = request.args

    def _multi(key: str, lowercase: bool = False) -> tuple:
        stripped = (raw.strip() for raw in args.getlist(key))
        if lowercase:
            return tuple(value.lower() for value in stripped if value)
        return tuple(value for value in stripped if value)

    return MarketQuery(
        int(args.get("page", 1)),
        args.get("search"),
        args.get("sort"),
        _multi("brand"),
        _multi("sticker", lowercase=True),
        _multi("label", lowercase=True),
        args.get("liked"),
    )
|
||||
|
||||
|
||||
def makeqs_factory():
    """
    Build a makeqs(...) that starts from the current filters + page.
    Auto-resets page to 1 when filters change unless you pass page explicitly.

    The returned closure produces a query string for links/forms, starting
    from the request's current state (captured once, below) and applying the
    requested additions/removals.
    """
    # Snapshot the current request's filters; the closure mutates copies
    # of these, never the originals.
    q = decode()
    base_stickers = [s for s in q.selected_stickers if (s or "").strip()]
    base_labels = [s for s in q.selected_labels if (s or "").strip()]
    base_brands = [s for s in q.selected_brands if (s or "").strip()]
    base_search = q.search or None
    base_liked = q.liked or None
    base_sort = q.sort or None
    base_page = int(q.page or 1)

    def makeqs(
        *,
        clear_filters: bool = False,
        add_sticker: Union[str, Iterable[str], None] = None,
        remove_sticker: Union[str, Iterable[str], None] = None,
        add_label: Union[str, Iterable[str], None] = None,
        remove_label: Union[str, Iterable[str], None] = None,
        add_brand: Union[str, Iterable[str], None] = None,
        remove_brand: Union[str, Iterable[str], None] = None,
        # KEEP is a sentinel meaning "leave this value as it currently is";
        # pass None explicitly to clear a value.
        search: Union[str, None, object] = KEEP,
        sort: Union[str, None, object] = KEEP,
        # NOTE(review): the docstring promises auto page-reset "unless you
        # pass page explicitly", which matches `page is KEEP` below — but
        # the default here is None (emit no page param), not KEEP.  Confirm
        # which default is intended.
        page: Union[int, None, object] = None,
        extra: Optional[Iterable[tuple]] = None,
        leading_q: bool = True,
        liked: Union[bool, None, object] = KEEP,
    ) -> str:
        # Apply add/remove/clear to each filter family.
        stickers = make_filter_set(base_stickers, add_sticker, remove_sticker, clear_filters)
        labels = make_filter_set(base_labels, add_label, remove_label, clear_filters)
        brands = make_filter_set(base_brands, add_brand, remove_brand, clear_filters)

        # clear_filters wipes search/liked; otherwise KEEP preserves the
        # current value and anything else overrides it.
        final_search = None if clear_filters else base_search if search is KEEP else ((search or "").strip() or None)
        final_sort = base_sort if sort is KEEP else (sort or None)
        final_liked = None if clear_filters else base_liked if liked is KEEP else liked

        # Did filters change?  (Compared as normalised sets, so order and
        # case differences don't count as a change.)
        filters_changed = (
            set(map(_norm, stickers)) != set(map(_norm, base_stickers))
            or set(map(_norm, labels)) != set(map(_norm, base_labels))
            or set(map(_norm, brands)) != set(map(_norm, base_brands))
            or final_search != base_search
            or final_sort != base_sort
            or final_liked != base_liked
        )

        # Page logic: a filter change invalidates the current page number.
        if page is KEEP:
            final_page = 1 if filters_changed else base_page
        else:
            final_page = page

        # Build params in a stable order: filters, search, liked, sort,
        # page, then any caller-supplied extras.
        params = []
        for s in stickers:
            params.append(("sticker", s))
        for s in labels:
            params.append(("label", s))
        for s in brands:
            params.append(("brand", s))
        if final_search:
            params.append(("search", final_search))
        if final_liked is not None:
            params.append(("liked", final_liked))
        if final_sort:
            params.append(("sort", final_sort))
        if final_page is not None:
            params.append(("page", str(final_page)))
        if extra:
            for k, v in extra:
                if v is not None:
                    params.append((k, str(v)))

        return build_qs(params, leading_q=leading_q)

    return makeqs
|
||||
44
bp/market/routes.py
Normal file
44
bp/market/routes.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import Blueprint, g, render_template, make_response, url_for
|
||||
|
||||
|
||||
from ..browse.routes import register as register_browse_bp
|
||||
|
||||
from .filters.qs import makeqs_factory
|
||||
from ..browse.services.nav import get_nav
|
||||
from ..api.routes import products_api
|
||||
from .admin.routes import register as register_admin
|
||||
|
||||
|
||||
|
||||
def register(url_prefix, title):
    """Build the market blueprint tree (browse + products API + admin).

    Args:
        url_prefix: URL mount point for the whole market section.
        title: site title exposed to templates as ``coop_title``.

    Returns:
        The assembled Quart Blueprint.
    """
    # BUGFIX: url_prefix was previously passed positionally, which lands in
    # Blueprint's third parameter (static_folder) — leaving the blueprint
    # mounted with no URL prefix at all.  It must be passed by keyword.
    bp = Blueprint("market", __name__, url_prefix=url_prefix)

    @bp.before_request
    def route():
        # Expose the query-string builder factory to downstream handlers.
        g.makeqs_factory = makeqs_factory

    @bp.context_processor
    async def inject_root():
        # Values available to every template rendered under this blueprint.
        return {
            "coop_title": title,
            "categories": (await get_nav(g.s))["cats"],
            "qs": makeqs_factory()(),
        }

    bp.register_blueprint(register_browse_bp())
    bp.register_blueprint(products_api)
    bp.register_blueprint(register_admin())

    return bp
|
||||
|
||||
248
bp/product/routes.py
Normal file
248
bp/product/routes.py
Normal file
@@ -0,0 +1,248 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from quart import (
|
||||
g,
|
||||
Blueprint,
|
||||
abort,
|
||||
redirect,
|
||||
render_template,
|
||||
make_response,
|
||||
)
|
||||
from sqlalchemy import select, func, update
|
||||
|
||||
from models.market import Product, ProductLike
|
||||
from ..browse.services.slugs import canonical_html_slug
|
||||
from ..browse.services.blacklist.product import is_product_blocked
|
||||
from ..browse.services import db_backend as cb
|
||||
from ..browse.services import _massage_product
|
||||
from utils import host_url
|
||||
from suma_browser.app.redis_cacher import cache_page, clear_cache
|
||||
from ..cart.services import total
|
||||
from .services.product_operations import toggle_product_like, massage_full_product
|
||||
|
||||
|
||||
def register():
    """Create the product blueprint (mounted at /product/<slug>).

    Routes: product detail page, like toggle, per-product admin page, and
    add-to-cart.  A before_request hook resolves the slug (or numeric id)
    into g.item_data before any handler runs.
    """
    bp = Blueprint("product", __name__, url_prefix="/product/<slug>")

    @bp.url_value_preprocessor
    def pull_blog(endpoint, values):
        # Stash the raw slug from the URL for the before_request resolver.
        g.product_slug = values.get("slug")

    # ─────────────────────────────────────────────────────────────
    # BEFORE REQUEST: Slug or numeric ID resolver
    # ─────────────────────────────────────────────────────────────
    @bp.before_request
    async def resolve_product():
        raw_slug = g.product_slug = getattr(g, "product_slug", None)
        if raw_slug is None:
            return

        # 1. If slug is INT → load product by ID
        if raw_slug.isdigit():
            product_id = int(raw_slug)

            product = await cb.db_product_full_id(
                g.s, product_id, user_id=g.user.id if g.user else 0
            )

            if not product:
                abort(404)

            # If product is deleted → SHOW as-is
            if product["deleted_at"]:
                d = product
                g.item_data = {"d": d, "slug": product["slug"], "liked": False}
                return

            # Not deleted → redirect to canonical slug
            # NOTE(review): this endpoint name differs from the one used in
            # the slug branch below ("product.product_detail") — confirm
            # which qualified name is correct for this blueprint nesting.
            canon = canonical_html_slug(product["slug"])
            return redirect(
                host_url(url_for("market.browse.product.product_detail", slug=canon))
            )

        # 2. Normal slug-based behaviour
        if is_product_blocked(raw_slug):
            abort(404)

        # Non-canonical slugs get a redirect to the canonical "-html" form.
        canon = canonical_html_slug(raw_slug)
        if canon != raw_slug:
            return redirect(
                host_url(url_for("product.product_detail", slug=canon))
            )

        # hydrate full product
        d = await cb.db_product_full(
            g.s, canon, user_id=g.user.id if g.user else 0
        )
        if not d:
            abort(404)
        g.item_data = {"d": d, "slug": canon, "liked": d["is_liked"]}

    @bp.context_processor
    def context():
        # Make the resolved product (d / slug / liked) available to every
        # template rendered under this blueprint.
        item_data = getattr(g, "item_data", None)

        if item_data:
            return {
                **item_data,
            }
        else:
            return {}

    # ─────────────────────────────────────────────────────────────
    # RENDER PRODUCT
    # ─────────────────────────────────────────────────────────────
    @bp.get("/")
    @cache_page(tag="browse")
    async def product_detail(slug: str):
        from suma_browser.app.utils.htmx import is_htmx_request

        # Determine which template to use based on request type
        if not is_htmx_request():
            # Normal browser request: full page with layout
            html = await render_template("_types/product/index.html")
        else:
            # HTMX request: main panel + OOB elements
            html = await render_template("_types/product/_oob_elements.html")

        return html

    @bp.post("/like/toggle/")
    @clear_cache(tag="browse", tag_scope="user")
    async def like_toggle(slug):
        # Use slug from URL parameter (set by url_prefix="/product/<slug>")
        product_slug = slug

        # Anonymous users: return the unliked button with a 403 so HTMX can
        # swap in an unchanged state.
        if not g.user:
            html = await render_template(
                "_types/browse/like/button.html",
                slug=product_slug,
                liked=False,
            )
            resp = make_response(html, 403)
            return resp

        user_id = g.user.id

        liked, error = await toggle_product_like(g.s, user_id, product_slug)

        if error:
            resp = make_response(error, 404)
            return resp

        # Re-render the like button reflecting the new state.
        html = await render_template(
            "_types/browse/like/button.html",
            slug=product_slug,
            liked=liked,
        )
        return html

    @bp.get("/admin/")
    async def admin(slug: str):
        from suma_browser.app.utils.htmx import is_htmx_request

        if not is_htmx_request():
            # Normal browser request: full page with layout
            html = await render_template("_types/product/admin/index.html")
        else:
            # HTMX request: main panel + OOB elements
            html = await render_template("_types/product/admin/_oob_elements.html")

        return await make_response(html)

    # These late imports run once, at registration time (before any request),
    # so names like url_for are bound by the time the handlers above execute.
    from suma_browser.app.bp.cart.services.identity import current_cart_identity
    #from suma_browser.app.bp.cart.routes import view_cart
    from models.market import CartItem
    from quart import request, url_for

    @bp.post("/cart/")
    @clear_cache(tag="browse", tag_scope="user")
    async def cart(slug: str):
        # make sure product exists (we *allow* deleted_at != None later if you want)
        product_id = await g.s.scalar(
            select(Product.id).where(
                Product.slug == slug,
                Product.deleted_at.is_(None),
            )
        )

        product = await g.s.scalar(
            select(Product).where(Product.id == product_id)
        )
        if not product:
            return await make_response("Product not found", 404)

        # --- NEW: read `count` from body (JSON or form), default to 1 ---
        count = 1
        try:
            if request.is_json:
                data = await request.get_json()
                if data is not None and "count" in data:
                    count = int(data["count"])
            else:
                form = await request.form
                if "count" in form:
                    count = int(form["count"])
        except (ValueError, TypeError):
            # if parsing fails, just fall back to 1
            count = 1
        # --- END NEW ---

        ident = current_cart_identity()

        # NOTE(review): `filters` is built but never used — the cart item is
        # looked up from g.cart below instead.  Dead code; confirm and remove.
        filters = [CartItem.deleted_at.is_(None), CartItem.product_id == product_id]
        if ident["user_id"] is not None:
            filters.append(CartItem.user_id == ident["user_id"])
        else:
            filters.append(CartItem.session_id == ident["session_id"])

        # Existing cart line for this product, if any (g.cart is the
        # request's hydrated cart).
        ci = next(
            (item for item in g.cart if item.product_id == product_id),
            None,
        )

        # --- NEW: set quantity based on `count` ---
        if ci:
            if count > 0:
                ci.quantity = count
            else:
                # count <= 0 → remove from cart entirely
                ci.quantity=0
                g.cart.remove(ci)
                await g.s.delete(ci)

        else:
            if count > 0:
                ci = CartItem(
                    user_id=ident["user_id"],
                    session_id=ident["session_id"],
                    product_id=product.id,
                    product=product,
                    quantity=count,
                )
                g.cart.append(ci)
                g.s.add(ci)
        # if count <= 0 and no existing item, do nothing
        # --- END NEW ---

        # no explicit commit; your session middleware should handle it

        # htmx support (optional)
        if request.headers.get("HX-Request") == "true":
            # You can return a small fragment or mini-cart here

            return await render_template(
                "_types/product/_added.html",
                cart=g.cart,
                item=ci,
                total = total
            )

        # normal POST: go to cart page
        return redirect(url_for("cart.view_cart"))

    return bp
|
||||
3
bp/product/services/__init__.py
Normal file
3
bp/product/services/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .product_operations import toggle_product_like, massage_full_product
|
||||
|
||||
__all__ = ["toggle_product_like", "massage_full_product"]
|
||||
95
bp/product/services/product_operations.py
Normal file
95
bp/product/services/product_operations.py
Normal file
@@ -0,0 +1,95 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from models.market import Product, ProductLike
|
||||
|
||||
|
||||
def massage_full_product(product: Product) -> dict:
    """
    Convert a Product ORM model to a dictionary with all fields.
    Used for rendering product detail pages.

    The resulting dict is passed through the browse-side _massage_product
    (app-root injection, blacklisted-section filtering) before returning.
    """
    # Local import to avoid a circular dependency with the browse services.
    from suma_browser.app.bp.browse.services import _massage_product

    # NOTE(review): when product.image is set, `gallery` is truthy and the
    # `gallery or [...]` expressions below yield ONLY that single image,
    # ignoring product.images entirely — confirm this is intentional.
    gallery = []
    if product.image:
        gallery.append(product.image)

    d = {
        "id": product.id,
        "slug": product.slug,
        "title": product.title,
        "brand": product.brand,
        "image": product.image,
        "description_short": product.description_short,
        "description_html": product.description_html or "",
        "suma_href": product.suma_href,
        # Decimal → float conversion for JSON/template friendliness; a price
        # of 0 also becomes None here (falsy check), not 0.0.
        "rrp": float(product.rrp) if product.rrp else None,
        "special_price": float(product.special_price) if product.special_price else None,
        "regular_price": float(product.regular_price) if product.regular_price else None,
        "images": gallery or [img.url for img in product.images],
        "all_image_urls": gallery or [img.url for img in product.images],
        "sections": [{"title": s.title, "html": s.html} for s in product.sections],
        # Stickers are lowercased to match the lowercased filter values.
        "stickers": [s.name.lower() for s in product.stickers],
        "labels": [l.name for l in product.labels],
        "nutrition": [{"key": n.key, "value": n.value, "unit": n.unit} for n in product.nutrition],
        "allergens": [{"name": a.name, "contains": a.contains} for a in product.allergens],
        # Caller is expected to overwrite this with the real per-user state.
        "is_liked": False,
    }

    return _massage_product(d)
|
||||
|
||||
|
||||
async def toggle_product_like(
    session: AsyncSession,
    user_id: int,
    product_slug: str,
) -> tuple[bool, Optional[str]]:
    """
    Toggle a product like for a given user using soft deletes.

    Returns (liked_state, error_message):
    - error_message is not None when an error occurred;
    - liked_state is True when the product is now liked, False when unliked.
    """
    from sqlalchemy import func, update

    # Resolve the (non-deleted) product for this slug.
    product_id = await session.scalar(
        select(Product.id).where(Product.slug == product_slug, Product.deleted_at.is_(None))
    )
    if not product_id:
        return False, "Product not found"

    # Is there currently an active (non-soft-deleted) like?
    active_like = await session.scalar(
        select(ProductLike).where(
            ProductLike.user_id == user_id,
            ProductLike.product_slug == product_slug,
            ProductLike.deleted_at.is_(None),
        )
    )

    if active_like is None:
        # Not liked yet → create a fresh like row.
        session.add(
            ProductLike(
                user_id=user_id,
                product_slug=product_slug,
            )
        )
        return True, None

    # Already liked → soft-delete the active like row(s) for this pair.
    await session.execute(
        update(ProductLike)
        .where(
            ProductLike.user_id == user_id,
            ProductLike.product_slug == product_slug,
            ProductLike.deleted_at.is_(None),
        )
        .values(deleted_at=func.now())
    )
    return False, None
|
||||
Reference in New Issue
Block a user