Security audit: fix IDOR, add rate limiting, HMAC auth, token hashing, XSS sanitization
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 3m22s
All checks were successful
Build and Deploy / build-and-deploy (push) Successful in 3m22s
Critical: Add ownership checks to all order routes (IDOR fix). High: Redis rate limiting on auth endpoints, HMAC-signed internal service calls replacing header-presence-only checks, nh3 HTML sanitization on ghost_sync and product import, internal auth on market API endpoints, SHA-256 hashed OAuth grant/code tokens. Medium: SECRET_KEY production guard, AP signature enforcement, is_admin param removal, cart_sid validation, SSRF protection on remote actor fetch. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -57,10 +57,13 @@ async def protect() -> None:
|
||||
if _is_exempt_endpoint():
|
||||
return
|
||||
|
||||
# Internal service-to-service calls are already gated by header checks
|
||||
# and only reachable on the Docker overlay network.
|
||||
# Internal service-to-service calls — validate HMAC signature
|
||||
if request.headers.get("X-Internal-Action") or request.headers.get("X-Internal-Data"):
|
||||
return
|
||||
from shared.infrastructure.internal_auth import validate_internal_request
|
||||
if validate_internal_request():
|
||||
return
|
||||
# Reject unsigned internal requests
|
||||
abort(403, "Invalid internal request signature")
|
||||
|
||||
session_token = qsession.get("csrf_token")
|
||||
if not session_token:
|
||||
|
||||
@@ -13,6 +13,8 @@ import os
|
||||
|
||||
import httpx
|
||||
|
||||
from shared.infrastructure.internal_auth import sign_internal_headers
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Re-usable async client (created lazily, one per process)
|
||||
@@ -65,10 +67,11 @@ async def call_action(
|
||||
base = _internal_url(app_name)
|
||||
url = f"{base}/internal/actions/{action_name}"
|
||||
try:
|
||||
headers = {ACTION_HEADER: "1", **sign_internal_headers(app_name)}
|
||||
resp = await _get_client().post(
|
||||
url,
|
||||
json=payload or {},
|
||||
headers={ACTION_HEADER: "1"},
|
||||
headers=headers,
|
||||
timeout=timeout,
|
||||
)
|
||||
if 200 <= resp.status_code < 300:
|
||||
|
||||
@@ -328,9 +328,10 @@ def create_activitypub_blueprint(app_name: str) -> Blueprint:
|
||||
|
||||
if not sig_valid:
|
||||
log.warning(
|
||||
"Unverified inbox POST from %s (%s) on %s — accepting anyway for now",
|
||||
"Unverified inbox POST from %s (%s) on %s — rejecting",
|
||||
from_actor_url, activity_type, domain,
|
||||
)
|
||||
abort(401, "Invalid or missing HTTP signature")
|
||||
|
||||
# Load actor row for DB operations
|
||||
actor_row = (
|
||||
|
||||
@@ -29,8 +29,43 @@ AP_CONTENT_TYPE = "application/activity+json"
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _is_safe_url(url: str) -> bool:
|
||||
"""Reject URLs pointing to private/internal IPs to prevent SSRF."""
|
||||
from urllib.parse import urlparse
|
||||
import ipaddress
|
||||
|
||||
parsed = urlparse(url)
|
||||
|
||||
# Require HTTPS
|
||||
if parsed.scheme != "https":
|
||||
return False
|
||||
|
||||
hostname = parsed.hostname
|
||||
if not hostname:
|
||||
return False
|
||||
|
||||
# Block obvious internal hostnames
|
||||
if hostname in ("localhost", "127.0.0.1", "::1", "0.0.0.0"):
|
||||
return False
|
||||
|
||||
try:
|
||||
addr = ipaddress.ip_address(hostname)
|
||||
if addr.is_private or addr.is_loopback or addr.is_reserved or addr.is_link_local:
|
||||
return False
|
||||
except ValueError:
|
||||
# Not an IP literal — hostname is fine (DNS resolution handled by httpx)
|
||||
# Block common internal DNS patterns
|
||||
if hostname.endswith(".internal") or hostname.endswith(".local"):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def fetch_remote_actor(actor_url: str) -> dict | None:
|
||||
"""Fetch a remote actor's JSON-LD profile."""
|
||||
if not _is_safe_url(actor_url):
|
||||
log.warning("Blocked SSRF attempt: %s", actor_url)
|
||||
return None
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
resp = await client.get(
|
||||
|
||||
@@ -13,6 +13,8 @@ import os
|
||||
|
||||
import httpx
|
||||
|
||||
from shared.infrastructure.internal_auth import sign_internal_headers
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Re-usable async client (created lazily, one per process)
|
||||
@@ -66,10 +68,11 @@ async def fetch_data(
|
||||
base = _internal_url(app_name)
|
||||
url = f"{base}/internal/data/{query_name}"
|
||||
try:
|
||||
headers = {DATA_HEADER: "1", **sign_internal_headers(app_name)}
|
||||
resp = await _get_client().get(
|
||||
url,
|
||||
params=params,
|
||||
headers={DATA_HEADER: "1"},
|
||||
headers=headers,
|
||||
timeout=timeout,
|
||||
)
|
||||
if resp.status_code == 200:
|
||||
|
||||
@@ -77,7 +77,13 @@ def create_base_app(
|
||||
|
||||
configure_logging(name)
|
||||
|
||||
app.secret_key = os.getenv("SECRET_KEY", "dev-secret-key-change-me-777")
|
||||
secret_key = os.getenv("SECRET_KEY")
|
||||
if not secret_key:
|
||||
env = os.getenv("ENVIRONMENT", "development")
|
||||
if env in ("production", "staging"):
|
||||
raise RuntimeError("SECRET_KEY environment variable must be set in production")
|
||||
secret_key = "dev-secret-key-change-me-777"
|
||||
app.secret_key = secret_key
|
||||
|
||||
# Per-app first-party session cookie (no shared domain — avoids Safari ITP)
|
||||
app.config["SESSION_COOKIE_NAME"] = f"{name}_session"
|
||||
@@ -192,11 +198,14 @@ def create_base_app(
|
||||
|
||||
from sqlalchemy import select
|
||||
from shared.db.session import get_account_session
|
||||
from shared.models.oauth_grant import OAuthGrant
|
||||
from shared.models.oauth_grant import OAuthGrant, hash_token
|
||||
try:
|
||||
token_h = hash_token(grant_token)
|
||||
async with get_account_session() as s:
|
||||
grant = await s.scalar(
|
||||
select(OAuthGrant).where(OAuthGrant.token == grant_token)
|
||||
select(OAuthGrant).where(
|
||||
(OAuthGrant.token_hash == token_h) | (OAuthGrant.token == grant_token)
|
||||
)
|
||||
)
|
||||
valid = grant is not None and grant.revoked_at is None
|
||||
except Exception:
|
||||
|
||||
92
shared/infrastructure/internal_auth.py
Normal file
92
shared/infrastructure/internal_auth.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""HMAC-based authentication for internal service-to-service calls.
|
||||
|
||||
Replaces the previous header-presence-only check with a signed token
|
||||
that includes a timestamp to prevent replay attacks.
|
||||
|
||||
Signing side (data_client.py / actions.py)::
|
||||
|
||||
from shared.infrastructure.internal_auth import sign_internal_headers
|
||||
headers = sign_internal_headers("cart")
|
||||
|
||||
Validation side (before_request guards, csrf.py)::
|
||||
|
||||
from shared.infrastructure.internal_auth import validate_internal_request
|
||||
if not validate_internal_request():
|
||||
abort(403)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
import os
|
||||
import time
|
||||
|
||||
from quart import request
|
||||
|
||||
# Shared secret — MUST be set in production
|
||||
_SECRET = os.getenv("INTERNAL_HMAC_SECRET", "").encode() or os.getenv("SECRET_KEY", "").encode()
|
||||
|
||||
# Maximum age of a signed request (seconds)
|
||||
_MAX_AGE = 300 # 5 minutes
|
||||
|
||||
|
||||
def _get_secret() -> bytes:
    """Return the HMAC key bytes.

    Prefers the module-level cached secret; falls back to SECRET_KEY from
    the environment, then to the dev placeholder.  NOTE(review): the dev
    fallback means HMAC runs with a publicly-known key when neither env
    var is set — confirm deployment always sets INTERNAL_HMAC_SECRET.
    """
    if _SECRET:
        return _SECRET
    return os.getenv("SECRET_KEY", "dev-secret-key-change-me-777").encode()
|
||||
|
||||
|
||||
def sign_internal_headers(app_name: str) -> dict[str, str]:
    """Build the signed X-Internal-* headers for an internal request.

    The signature is HMAC-SHA256 over ``"<unix-timestamp>:<app_name>"`` so
    the receiving service can verify both freshness and the claimed caller.
    Returns a dict of headers to merge into the outgoing request.
    """
    timestamp = str(int(time.time()))
    message = f"{timestamp}:{app_name}".encode()
    digest = hmac.new(_get_secret(), message, hashlib.sha256).hexdigest()
    headers = {
        "X-Internal-Timestamp": timestamp,
        "X-Internal-App": app_name,
        "X-Internal-Signature": digest,
    }
    return headers
|
||||
|
||||
|
||||
def validate_internal_request() -> bool:
    """Validate the current request's X-Internal-* HMAC headers.

    Returns True only when all three headers (timestamp, app, signature)
    are present, the timestamp lies within the allowed skew window, and
    the signature matches our recomputation.

    NOTE(review): the signature covers only timestamp + app name, not the
    method/path/body — a captured signature can be replayed against a
    different internal endpoint within the freshness window.  Confirm
    whether that risk is acceptable on the overlay network.
    """
    timestamp = request.headers.get("X-Internal-Timestamp", "")
    caller = request.headers.get("X-Internal-App", "")
    provided_sig = request.headers.get("X-Internal-Signature", "")

    if not (timestamp and caller and provided_sig):
        return False

    # Reject stale (or far-future) timestamps to limit replay.
    try:
        issued_at = int(timestamp)
    except (ValueError, TypeError):
        return False
    if abs(int(time.time()) - issued_at) > _MAX_AGE:
        return False

    # Recompute and compare in constant time to avoid a timing oracle.
    expected = hmac.new(
        _get_secret(), f"{timestamp}:{caller}".encode(), hashlib.sha256
    ).hexdigest()
    return hmac.compare_digest(provided_sig, expected)
|
||||
|
||||
|
||||
def is_internal_request() -> bool:
    """Return True when the request carries a *valid* signed internal header.

    Only the HMAC-signed form is trusted.  Legacy presence-only headers
    (X-Internal-Action / X-Internal-Data without a signature) are no
    longer accepted and yield False.
    """
    if not request.headers.get("X-Internal-Signature"):
        # Legacy unsigned internal headers: rejected during migration.
        return False
    return validate_internal_request()
|
||||
142
shared/infrastructure/rate_limit.py
Normal file
142
shared/infrastructure/rate_limit.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""Redis-based rate limiter for auth endpoints.
|
||||
|
||||
Provides a decorator that enforces per-key rate limits using a sliding
|
||||
window counter stored in Redis (auth DB 15).
|
||||
|
||||
Usage::
|
||||
|
||||
from shared.infrastructure.rate_limit import rate_limit
|
||||
|
||||
@rate_limit(key_func=lambda: request.form.get("email", "").lower(),
|
||||
max_requests=5, window_seconds=900, scope="magic_link")
|
||||
@bp.post("/start/")
|
||||
async def start_login():
|
||||
...
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
import time
|
||||
|
||||
from quart import request, jsonify, make_response
|
||||
|
||||
|
||||
async def _check_rate_limit(
    key: str,
    max_requests: int,
    window_seconds: int,
) -> tuple[bool, int]:
    """Check and increment the sliding-window counter for *key*.

    Stores one Redis sorted-set member per request, scored by arrival
    time, then counts members inside the window.

    Returns (allowed, remaining).
    """
    import uuid

    from shared.infrastructure.auth_redis import get_auth_redis

    r = await get_auth_redis()
    now = time.time()
    window_start = now - window_seconds
    redis_key = f"rl:{key}"

    pipe = r.pipeline()
    # Remove expired entries
    pipe.zremrangebyscore(redis_key, 0, window_start)
    # Add current request.  The random suffix keeps members unique: two
    # requests landing on the same float timestamp must not collapse into
    # a single ZADD member, or bursts would be under-counted.
    member = f"{now}:{uuid.uuid4().hex}".encode()
    pipe.zadd(redis_key, {member: now})
    # Count entries in window (includes the request just added)
    pipe.zcard(redis_key)
    # Set TTL so an idle key auto-expires after one full window
    pipe.expire(redis_key, window_seconds)
    results = await pipe.execute()

    count = results[2]
    allowed = count <= max_requests
    remaining = max(0, max_requests - count)

    return allowed, remaining
|
||||
|
||||
|
||||
def rate_limit(
    *,
    key_func,
    max_requests: int,
    window_seconds: int,
    scope: str,
):
    """Decorator that rate-limits a Quart route.

    Parameters
    ----------
    key_func:
        Callable returning the rate-limit key (e.g. email, IP).
        Called inside request context.
    max_requests:
        Maximum number of requests allowed in the window.
    window_seconds:
        Sliding window duration in seconds.
    scope:
        Namespace prefix for the Redis key (e.g. "magic_link").
    """
    def decorator(fn):
        @functools.wraps(fn)
        async def wrapper(*args, **kwargs):
            identity = key_func()
            # No key available (e.g. missing form field) — nothing
            # sensible to limit on, so pass straight through.
            if not identity:
                return await fn(*args, **kwargs)

            try:
                allowed, _remaining = await _check_rate_limit(
                    f"{scope}:{identity}", max_requests, window_seconds,
                )
            except Exception:
                # Fail open: an unreachable Redis must not take auth down.
                return await fn(*args, **kwargs)

            if allowed:
                return await fn(*args, **kwargs)

            resp = await make_response(
                jsonify({"error": "rate_limited", "retry_after": window_seconds}),
                429,
            )
            resp.headers["Retry-After"] = str(window_seconds)
            return resp

        return wrapper
    return decorator
|
||||
|
||||
|
||||
async def check_poll_backoff(device_code: str) -> tuple[bool, int]:
    """Enforce exponential backoff on device token polling.

    Returns (allowed, interval) where interval is the recommended
    poll interval in seconds. If not allowed, caller should return
    a 'slow_down' error per RFC 8628.
    """
    from shared.infrastructure.auth_redis import get_auth_redis

    r = await get_auth_redis()
    key = f"rl:devpoll:{device_code}"
    now = time.time()

    raw = await r.get(key)
    if raw:
        data = raw.decode() if isinstance(raw, bytes) else raw
        # Guard the parse: a malformed/legacy stored value must degrade to
        # the first-poll path instead of raising ValueError/IndexError.
        try:
            last_str, interval_str = data.split(":", 1)
            last_poll = float(last_str)
            interval = int(interval_str)
        except ValueError:
            last_poll = None
            interval = 0

        if last_poll is not None:
            elapsed = now - last_poll
            if elapsed < interval:
                # Too fast — increase interval (capped at 60s)
                new_interval = min(interval + 5, 60)
                await r.set(key, f"{now}:{new_interval}".encode(), ex=900)
                return False, new_interval

            # Acceptable pace — keep current interval
            await r.set(key, f"{now}:{interval}".encode(), ex=900)
            return True, interval

    # First poll (or corrupt state reset)
    initial_interval = 5
    await r.set(key, f"{now}:{initial_interval}".encode(), ex=900)
    return True, initial_interval
|
||||
@@ -1,4 +1,5 @@
|
||||
from __future__ import annotations
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
@@ -6,21 +7,28 @@ from shared.db.base import Base
|
||||
|
||||
|
||||
class OAuthCode(Base):
|
||||
"""Short-lived authorization code issued during OAuth flow.
|
||||
|
||||
The ``code`` column is retained during migration but new codes store
|
||||
only ``code_hash``. Lookups should use ``code_hash``.
|
||||
"""
|
||||
__tablename__ = "oauth_codes"
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
code: Mapped[str] = mapped_column(String(128), unique=True, index=True, nullable=False)
|
||||
code: Mapped[str | None] = mapped_column(String(128), nullable=True)
|
||||
code_hash: Mapped[str | None] = mapped_column(String(64), unique=True, nullable=True, index=True)
|
||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
|
||||
client_id: Mapped[str] = mapped_column(String(64), nullable=False)
|
||||
redirect_uri: Mapped[str] = mapped_column(String(512), nullable=False)
|
||||
expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
|
||||
used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
|
||||
grant_token: Mapped[str | None] = mapped_column(String(128), nullable=True)
|
||||
grant_token_hash: Mapped[str | None] = mapped_column(String(64), nullable=True)
|
||||
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, server_default=func.now())
|
||||
|
||||
user = relationship("User", backref="oauth_codes")
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_oauth_code_code", "code", unique=True),
|
||||
Index("ix_oauth_code_code_hash", "code_hash", unique=True),
|
||||
Index("ix_oauth_code_user", "user_id"),
|
||||
)
|
||||
|
||||
@@ -1,21 +1,31 @@
|
||||
from __future__ import annotations
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Integer, DateTime, ForeignKey, func, Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from shared.db.base import Base
|
||||
|
||||
|
||||
def hash_token(token: str) -> str:
    """Return the hex SHA-256 digest of *token* for at-rest storage.

    Tokens are stored hashed so a database leak does not expose usable
    credentials; lookups hash the presented token and compare digests.
    """
    digest = hashlib.sha256(token.encode("utf-8"))
    return digest.hexdigest()
|
||||
|
||||
|
||||
class OAuthGrant(Base):
|
||||
"""Long-lived grant tracking each client-app session authorization.
|
||||
|
||||
Created when the OAuth authorize endpoint issues a code. Tied to the
|
||||
account session that issued it (``issuer_session``) so that logging out
|
||||
on one device revokes only that device's grants.
|
||||
|
||||
The ``token`` column is retained during migration but new grants store
|
||||
only ``token_hash``. Lookups should use ``token_hash``.
|
||||
"""
|
||||
__tablename__ = "oauth_grants"
|
||||
|
||||
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
|
||||
token: Mapped[str] = mapped_column(String(128), unique=True, nullable=False)
|
||||
token: Mapped[str | None] = mapped_column(String(128), nullable=True)
|
||||
token_hash: Mapped[str | None] = mapped_column(String(64), unique=True, nullable=True, index=True)
|
||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
|
||||
client_id: Mapped[str] = mapped_column(String(64), nullable=False)
|
||||
issuer_session: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
|
||||
@@ -26,7 +36,7 @@ class OAuthGrant(Base):
|
||||
user = relationship("User", backref="oauth_grants")
|
||||
|
||||
__table_args__ = (
|
||||
Index("ix_oauth_grant_token", "token", unique=True),
|
||||
Index("ix_oauth_grant_token_hash", "token_hash", unique=True),
|
||||
Index("ix_oauth_grant_issuer", "issuer_session"),
|
||||
Index("ix_oauth_grant_device", "device_id", "client_id"),
|
||||
)
|
||||
|
||||
@@ -44,6 +44,7 @@ Werkzeug==3.1.3
|
||||
wsproto==1.2.0
|
||||
zstandard==0.25.0
|
||||
redis>=5.0
|
||||
nh3>=0.2.14
|
||||
mistune>=3.0
|
||||
pytest>=8.0
|
||||
pytest-asyncio>=0.23
|
||||
|
||||
Reference in New Issue
Block a user