v1
This commit is contained in:
@@ -0,0 +1,58 @@
|
||||
"""
|
||||
FastAPI dependency for extracting and validating the current authenticated user.
|
||||
"""
|
||||
import uuid
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
||||
from jose import JWTError
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.jwt import verify_access_token
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
|
||||
bearer_scheme = HTTPBearer()
|
||||
|
||||
|
||||
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
    db: AsyncSession = Depends(get_db),
) -> User:
    """
    Resolve the request's Bearer token to a live, active User row.

    Raises:
        401 HTTPException if the token is missing, invalid, or expired.
        401 HTTPException if the user no longer exists or is inactive.
    """
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        claims = verify_access_token(credentials.credentials)
        subject = claims.get("sub")
        if subject is None:
            raise unauthorized
        user_key = uuid.UUID(subject)
    except (JWTError, ValueError):
        # Bad signature, expired token, or malformed UUID — all look the same
        # to the caller: 401.
        raise unauthorized

    row = await db.execute(select(User).where(User.id == user_key))
    user = row.scalars().first()

    if user is None:
        raise unauthorized

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Inactive user account",
        )

    return user
|
||||
@@ -0,0 +1,84 @@
|
||||
"""
|
||||
Explicit JWT creation and verification.
|
||||
No ORM magic — all logic is auditable here.
|
||||
"""
|
||||
import hashlib
|
||||
import secrets
|
||||
import uuid
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
from jose import JWTError, jwt
|
||||
|
||||
from app.config import settings
|
||||
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """
    Create a signed JWT access token.

    Args:
        data: Payload data to encode (must include 'sub' key).
        expires_delta: Token lifetime. Defaults to ACCESS_TOKEN_EXPIRE_MINUTES.

    Returns:
        Encoded JWT string.
    """
    # Capture a single instant so 'iat' and 'exp' are computed from the same
    # clock reading (the original called datetime.now() twice, allowing them
    # to drift by a few microseconds).
    now = datetime.now(timezone.utc)
    if expires_delta is None:
        expires_delta = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode = data.copy()
    to_encode.update({"exp": now + expires_delta, "iat": now})
    return jwt.encode(
        to_encode,
        settings.SECRET_KEY,
        algorithm=settings.ALGORITHM,
    )
|
||||
|
||||
|
||||
def verify_access_token(token: str) -> dict:
    """
    Verify and decode a JWT access token.

    Args:
        token: Encoded JWT string.

    Returns:
        Decoded payload dict.

    Raises:
        JWTError: If the token is invalid or expired.
    """
    return jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
|
||||
|
||||
|
||||
def hash_refresh_token(plaintext: str) -> str:
    """Return the hex SHA-256 digest of *plaintext*.

    A fast hash is acceptable here because refresh tokens are already
    high-entropy random secrets (unlike passwords).
    """
    digest = hashlib.sha256()
    digest.update(plaintext.encode("utf-8"))
    return digest.hexdigest()
|
||||
|
||||
|
||||
def create_refresh_token() -> tuple[str, str]:
    """
    Generate a cryptographically secure refresh token.

    Returns:
        Tuple of (plaintext_token, hashed_token).
        Store only the hash; send the plaintext to the client.
    """
    plain = secrets.token_urlsafe(64)
    hashed = hash_refresh_token(plain)
    return plain, hashed
|
||||
|
||||
|
||||
def get_token_expiry(days: Optional[int] = None) -> datetime:
    """Return the UTC datetime at which a refresh token should expire.

    Defaults to REFRESH_TOKEN_EXPIRE_DAYS from settings when *days* is None.
    """
    if days is None:
        days = settings.REFRESH_TOKEN_EXPIRE_DAYS
    return datetime.now(timezone.utc) + timedelta(days=days)
|
||||
@@ -0,0 +1,71 @@
|
||||
"""
|
||||
FastAPI dependency helpers for authenticated member access.
|
||||
Member tokens carry role='member' in the JWT payload.
|
||||
"""
|
||||
import uuid
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
from jose import JWTError
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.jwt import verify_access_token
|
||||
from app.database import get_db
|
||||
from app.models.member import Member
|
||||
|
||||
bearer_scheme = HTTPBearer()
|
||||
|
||||
|
||||
async def _get_member_from_token(
    credentials: HTTPAuthorizationCredentials,
    db: AsyncSession,
) -> Member:
    """Decode a member-role access token and load the matching active Member.

    Raises 401 for any token problem (wrong role, missing/invalid sub,
    expired signature), for a missing member, or for an inactive account.
    """
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        claims = verify_access_token(credentials.credentials)
        # Only tokens explicitly minted for members are accepted here.
        if claims.get("role") != "member":
            raise unauthorized
        subject = claims.get("sub")
        if subject is None:
            raise unauthorized
        member_key = uuid.UUID(subject)
    except (JWTError, ValueError):
        raise unauthorized

    row = await db.execute(select(Member).where(Member.id == member_key))
    member = row.scalars().first()

    if member is None:
        raise unauthorized

    if not member.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Inactive member account",
        )

    return member
|
||||
|
||||
|
||||
async def get_authenticated_member(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
    db: AsyncSession = Depends(get_db),
) -> Member:
    """FastAPI dependency: any authenticated member, regardless of onboarding state."""
    member = await _get_member_from_token(credentials, db)
    return member
|
||||
|
||||
|
||||
async def get_current_member(
    member: Member = Depends(get_authenticated_member),
) -> Member:
    """FastAPI dependency: an authenticated member whose onboarding is complete.

    Raises 403 when the member is authenticated but not yet fully onboarded.
    """
    if member.member_status == "active":
        return member
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Member onboarding is not complete.",
    )
|
||||
@@ -0,0 +1,21 @@
|
||||
"""
|
||||
Password hashing and verification using bcrypt directly.
|
||||
"""
|
||||
import bcrypt
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt, returning the hash as a UTF-8 string."""
    salt = bcrypt.gensalt()
    hashed = bcrypt.hashpw(password.encode("utf-8"), salt)
    return hashed.decode("utf-8")
|
||||
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Verify a plaintext password against a bcrypt hash.

    Returns False (rather than raising) for inputs bcrypt rejects outright.
    """
    candidate = plain_password.encode("utf-8")
    stored = hashed_password.encode("utf-8")
    try:
        return bcrypt.checkpw(candidate, stored)
    except ValueError:
        # bcrypt 4.x raises for oversized inputs; treat them as invalid credentials.
        return False
|
||||
@@ -0,0 +1,39 @@
|
||||
from pydantic_settings import BaseSettings
|
||||
from pydantic import field_validator
|
||||
from typing import List
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / .env file.

    All defaults are development values — SECRET_KEY in particular must be
    overridden in production.
    """

    # Core infrastructure & auth.
    DATABASE_URL: str = "postgresql+asyncpg://postgres:postgres@localhost:5432/goodwalk"
    SECRET_KEY: str = "change-me-to-a-long-random-secret"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 15
    REFRESH_TOKEN_EXPIRE_DAYS: int = 7
    ALGORITHM: str = "HS256"
    # Comma-separated string; consumed as a list via allowed_origins_list.
    ALLOWED_ORIGINS: str = "http://localhost:5173,https://www.goodwalk.co.nz"
    ENABLE_DOCS: bool = False
    SITE_URL: str = "http://localhost:5173"
    MEMBERS_URL: str = "http://localhost:5173/members"

    # Email — set EMAIL_BACKEND=console (default) to print to stdout during dev
    EMAIL_BACKEND: str = "console"
    SMTP_HOST: str = ""
    SMTP_PORT: int = 587
    SMTP_USE_TLS: bool = True
    SMTP_USER: str = ""
    SMTP_PASSWORD: str = ""
    EMAIL_FROM: str = "noreply@goodwalk.co.nz"

    @field_validator("ALLOWED_ORIGINS", mode="before")
    @classmethod
    def parse_allowed_origins(cls, v: str) -> str:
        # Keep as string; we parse to list via property
        # NOTE(review): this validator is currently a pass-through — presumably
        # kept so env values are never coerced to a list by pydantic; confirm.
        return v

    @property
    def allowed_origins_list(self) -> List[str]:
        # Split on commas, trimming whitespace and dropping empty entries.
        return [origin.strip() for origin in self.ALLOWED_ORIGINS.split(",") if origin.strip()]

    model_config = {"env_file": ".env", "extra": "ignore"}


# Single shared settings instance imported across the app.
settings = Settings()
|
||||
@@ -0,0 +1,29 @@
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
|
||||
from typing import AsyncGenerator
|
||||
from app.config import settings
|
||||
|
||||
# Single process-wide async engine. pool_pre_ping validates pooled
# connections before use so stale ones are replaced transparently.
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=False,
    pool_pre_ping=True,
)

# Session factory used by the get_db dependency and background tasks.
# expire_on_commit=False keeps ORM objects readable after commit, which
# matters in async code that touches attributes post-commit.
AsyncSessionLocal = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autoflush=False,
    autocommit=False,
)
|
||||
|
||||
|
||||
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """
    Yield a database session scoped to one request.

    Commits after the request handler finishes without error; rolls back and
    re-raises on any exception; always closes the session.
    """
    session = AsyncSessionLocal()
    try:
        yield session
        await session.commit()
    except Exception:
        await session.rollback()
        raise
    finally:
        await session.close()
|
||||
@@ -0,0 +1,3 @@
|
||||
from app.experiments.registry import EXPERIMENT_REGISTRY
|
||||
|
||||
__all__ = ["EXPERIMENT_REGISTRY"]
|
||||
@@ -0,0 +1,46 @@
|
||||
# Central definition of all A/B experiments, keyed by experiment_key.
# Synced into the database at startup (see sync_experiment_registry in the
# app lifespan). Variant "allocation" values are percentages and are
# presumably expected to sum to 100 per experiment — confirm in the sync code.
EXPERIMENT_REGISTRY = {
    "homepage_hero_test": {
        "experiment_key": "homepage_hero_test",
        "cookie_name": "exp_homepage_hero",
        "name": "Homepage hero test",
        "description": "Homepage hero messaging and CTA emphasis.",
        "enabled": True,
        "eligible_routes": ["/"],
        "variants": [
            {
                "variant_key": "control",
                "label": "Current hero copy",
                "allocation": 50,
                "is_control": True,
            },
            {
                "variant_key": "tiny_gang_social_proof",
                "label": "Tiny Gang social proof",
                "allocation": 50,
                "is_control": False,
            },
        ],
    },
    "pricing_cta_test": {
        "experiment_key": "pricing_cta_test",
        "cookie_name": "exp_pricing_cta",
        "name": "Pricing CTA test",
        "description": "Pricing CTA emphasis without changing page meaning.",
        "enabled": True,
        "eligible_routes": ["/our-pricing"],
        "variants": [
            {
                "variant_key": "control",
                "label": "Book now CTA",
                "allocation": 50,
                "is_control": True,
            },
            {
                "variant_key": "meet_greet_emphasis",
                "label": "Meet and greet emphasis",
                "allocation": 50,
                "is_control": False,
            },
        ],
    },
}
|
||||
@@ -0,0 +1,145 @@
|
||||
import asyncio
|
||||
import traceback
|
||||
import uuid
|
||||
from contextlib import asynccontextmanager, suppress
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import JSONResponse
|
||||
from jose import JWTError
|
||||
from slowapi import _rate_limit_exceeded_handler
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
from sqlalchemy import select
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
|
||||
from app.config import settings
|
||||
from app.database import AsyncSessionLocal, engine
|
||||
from app.services.experiments import sync_experiment_registry
|
||||
from app.services.notifications import notification_automation_loop
|
||||
from app.middleware.rate_limit import limiter
|
||||
from app.middleware.logging import RequestLogMiddleware
|
||||
from app.routers import auth, pages, posts, settings as settings_router, sections
|
||||
from app.routers import analytics as analytics_router
|
||||
from app.routers import audit as audit_router
|
||||
from app.routers import contact as contact_router
|
||||
from app.routers import experiments as experiments_router
|
||||
from app.routers import members as members_router
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup/shutdown lifecycle for the app.

    Startup: sync the in-code experiment registry into the DB, then start the
    notification loop as a background task for the app's lifetime.
    Shutdown: cancel the loop (swallowing only the expected CancelledError)
    and dispose of the engine's connection pool.
    """
    async with AsyncSessionLocal() as session:
        await sync_experiment_registry(session)
        await session.commit()
    notification_task = asyncio.create_task(notification_automation_loop())
    try:
        yield
    finally:
        notification_task.cancel()
        with suppress(asyncio.CancelledError):
            await notification_task
        await engine.dispose()
|
||||
|
||||
|
||||
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Apply baseline browser-facing hardening headers to every response."""

    # Header table applied verbatim to each response. The CSP is maximally
    # restrictive because this service only serves API JSON, never HTML.
    _HEADERS = {
        "X-Content-Type-Options": "nosniff",
        "X-Frame-Options": "DENY",
        "Content-Security-Policy": (
            "default-src 'none'; frame-ancestors 'none'; base-uri 'none'; form-action 'none'"
        ),
        "Strict-Transport-Security": "max-age=31536000; includeSubDomains",
        "Referrer-Policy": "strict-origin-when-cross-origin",
    }

    async def dispatch(self, request: Request, call_next):
        response = await call_next(request)
        for name, value in self._HEADERS.items():
            response.headers[name] = value
        return response
|
||||
|
||||
|
||||
# Application instance — interactive docs are disabled unless ENABLE_DOCS is set.
app = FastAPI(
    title="Goodwalk CMS API",
    version="1.0.0",
    description="CMS API for the Goodwalk marketing site",
    lifespan=lifespan,
    docs_url="/docs" if settings.ENABLE_DOCS else None,
    redoc_url="/redoc" if settings.ENABLE_DOCS else None,
)

# Rate limiter
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)

# CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.allowed_origins_list,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# NOTE(review): in Starlette the last-added middleware is outermost, so
# requests pass through RequestLogMiddleware first — confirm this ordering
# (logging before security headers) is intentional.
app.add_middleware(SecurityHeadersMiddleware)
app.add_middleware(RequestLogMiddleware)

# Routers — all under /api/v1/
API_PREFIX = "/api/v1"

app.include_router(pages.router, prefix=API_PREFIX)
app.include_router(posts.router, prefix=API_PREFIX)
app.include_router(settings_router.router, prefix=API_PREFIX)
app.include_router(auth.router, prefix=API_PREFIX)

# Legacy-compatible section endpoints (no /api/v1 prefix — paths match existing frontend)
app.include_router(sections.router)

# Analytics — ingest endpoint is public (/api/analytics/event), summary is authed (/api/v1/analytics/summary)
app.include_router(analytics_router.router)
app.include_router(contact_router.router)
app.include_router(experiments_router.router)
app.include_router(members_router.router, prefix=API_PREFIX)
app.include_router(audit_router.router, prefix=API_PREFIX)
|
||||
|
||||
|
||||
@app.exception_handler(Exception)
async def unhandled_exception_handler(request: Request, exc: Exception):
    """Catch unhandled exceptions, log them for authenticated members, return 500.

    Best-effort: if the request carries a valid member Bearer token, write an
    audit-log row describing the failure. Any failure inside the logging path
    is deliberately swallowed so the client still gets the generic 500.
    """
    auth_header = request.headers.get("Authorization", "")
    if auth_header.startswith("Bearer "):
        token = auth_header[7:]
        try:
            # Imported lazily to keep module import time free of cycles.
            from app.auth.jwt import verify_access_token
            from app.models.member import Member
            from app.services.audit import log_audit

            payload = verify_access_token(token)
            if payload.get("role") == "member":
                member_uuid = uuid.UUID(payload["sub"])
                async with AsyncSessionLocal() as session:
                    result = await session.execute(
                        select(Member).where(Member.id == member_uuid)
                    )
                    member = result.scalars().first()
                    await log_audit(
                        session,
                        member_id=member_uuid,
                        member_email=member.email if member else None,
                        action_type="error",
                        area=str(request.url.path),
                        description=f"Unhandled error: {type(exc).__name__}",
                        status="error",
                        error_message=str(exc)[:500],
                        error_detail=traceback.format_exc()[:4000],
                        ip_address=request.client.host if request.client else None,
                        user_agent=request.headers.get("User-Agent"),
                    )
                    await session.commit()
        # The original caught (JWTError, ValueError, Exception) — Exception
        # already subsumes the other two, so the tuple was redundant and
        # misleadingly specific. Intentionally broad: see docstring.
        except Exception:
            pass  # Never let audit logging suppress the original error response

    return JSONResponse(status_code=500, content={"error": "Internal server error"})
|
||||
|
||||
|
||||
@app.get("/health", tags=["Health"])
async def health_check():
    """Liveness probe: always reports the service as up."""
    payload = {"status": "ok"}
    return payload
|
||||
@@ -0,0 +1,293 @@
|
||||
"""
|
||||
Request logging middleware.
|
||||
|
||||
Prints a clean, colour-coded line for every meaningful HTTP request.
|
||||
Context-aware: pulls the email, member status, service type, etc. from the
|
||||
request body for the most important endpoints so you can read the log without
|
||||
needing to replay the request.
|
||||
|
||||
Format
|
||||
------
|
||||
METHOD /path/to/endpoint STATUS timing origin
|
||||
↳ human-readable context (when relevant)
|
||||
|
||||
Localhost / loopback addresses are rendered as local:PORT rather than the
|
||||
raw IP.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import json
|
||||
import sys
|
||||
import time
|
||||
from typing import Optional
|
||||
|
||||
from rich.console import Console
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.requests import Request
|
||||
|
||||
# Force UTF-8 on Windows so arrow / symbol characters render correctly in any
|
||||
# terminal (Windows Terminal, VS Code, PowerShell). On other platforms the
|
||||
# default encoding is already UTF-8.
|
||||
def _make_console() -> Console:
    """Build the Rich console, forcing UTF-8 output on Windows.

    On non-Windows platforms (or when stdout has no binary buffer, e.g. under
    test capture) the default console is used unchanged.
    """
    if sys.platform != "win32" or not hasattr(sys.stdout, "buffer"):
        return Console(highlight=False, markup=True)
    wrapped = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8", line_buffering=True)
    return Console(highlight=False, markup=True, file=wrapped)
|
||||
|
||||
_console = _make_console()
|
||||
|
||||
# ── Paths that are too noisy to log ───────────────────────────────────────────
|
||||
|
||||
_SKIP = frozenset({"/health", "/favicon.ico", "/robots.txt"})
|
||||
_BODY_METHODS = frozenset({"POST", "PUT", "PATCH"})
|
||||
|
||||
# ── Colour maps ───────────────────────────────────────────────────────────────
|
||||
|
||||
_METHOD_STYLE: dict[str, str] = {
|
||||
"GET": "bold #6ea8fe", # soft blue
|
||||
"POST": "bold #75b798", # soft green
|
||||
"PUT": "bold #e6a817", # amber
|
||||
"PATCH": "bold #c586c0", # lavender
|
||||
"DELETE": "bold #f28b82", # soft red
|
||||
"HEAD": "dim",
|
||||
"OPTIONS": "dim",
|
||||
}
|
||||
|
||||
|
||||
def _status_style(code: int) -> str:
    """Map an HTTP status code to a Rich style name for the log line."""
    thresholds = (
        (300, "bold green"),   # 2xx success
        (400, "bold cyan"),    # 3xx redirect
        (500, "bold yellow"),  # 4xx client error
    )
    for upper, style in thresholds:
        if code < upper:
            return style
    return "bold red"  # 5xx server error
|
||||
|
||||
|
||||
def _timing_style(ms: float) -> str:
    """Map a request duration in milliseconds to a Rich style name."""
    if ms >= 1_000:
        return "bold red"  # over a second: shout
    return "yellow" if ms >= 200 else "white"
|
||||
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
_LOCAL_HOSTS = {"127.0.0.1", "::1", "0.0.0.0", "localhost", "::ffff:127.0.0.1"}
|
||||
|
||||
|
||||
def _origin(request: Request) -> str:
    """Render the client origin, collapsing loopback addresses to local:PORT."""
    client = request.client
    host = client.host if client else "unknown"
    if host in _LOCAL_HOSTS:
        port = request.url.port or 8000
        return f"[dim]local:{port}[/dim]"
    return f"[dim]{host}[/dim]"
|
||||
|
||||
|
||||
def _body(raw: bytes) -> Optional[dict]:
    """Parse *raw* as a JSON object; return None for empty, invalid, or non-dict bodies."""
    if not raw:
        return None
    try:
        parsed = json.loads(raw)
    except (json.JSONDecodeError, ValueError):
        return None
    return parsed if isinstance(parsed, dict) else None
|
||||
|
||||
|
||||
def _shorten(path: str, width: int = 58) -> str:
    """Truncate *path* with an ellipsis if needed, then left-justify (pad right) to *width* chars."""
    if len(path) > width:
        path = f"{path[: width - 1]}…"
    return path.ljust(width)
|
||||
|
||||
|
||||
# ── Context extraction ────────────────────────────────────────────────────────
|
||||
# Each branch returns a *markup* string (may contain [colour] tags) or None.
|
||||
|
||||
def _context(path: str, method: str, data: Optional[dict], status: int) -> Optional[str]:  # noqa: C901
    """Return a one-line, Rich-markup summary for the endpoints worth annotating.

    Branch ORDER matters: more specific path checks must run before broader
    ones (e.g. login/verify before login), and the generic 4xx hints at the
    bottom only apply when no endpoint-specific branch matched. Returns None
    when there is nothing useful to say.
    """
    d = data or {}
    em = d.get("email", "")

    # ── Admin auth ────────────────────────────────────────────────────────────
    if path.endswith("/auth/login") and "/members/" not in path:
        if status < 400:
            return f"[dim]admin[/dim] · {em}"
        return f"[red]✗[/red] bad credentials · {em}"

    # ── Member auth ───────────────────────────────────────────────────────────
    if "/members/auth/login/verify" in path:
        if status < 400:
            return f"[dim]member 2FA[/dim] · {em} · [green]verified ✓[/green]"
        return f"[red]✗[/red] bad 2FA code · {em}"

    if "/members/auth/login" in path and path.endswith("/login"):
        if status < 400:
            return f"[dim]member login[/dim] · {em}"
        return f"[red]✗[/red] bad password · {em}"

    if "/members/auth/refresh" in path:
        return None  # token rotation — no useful body detail

    # ── Claim flow ────────────────────────────────────────────────────────────
    if "/members/claim/request" in path:
        return f"[dim]claim request[/dim] · {em}"

    if "/members/claim/complete" in path:
        if status < 400:
            return f"[green]account claimed[/green] · {em}"
        return f"[red]✗[/red] claim failed · {em}"

    # ── Member: profile ───────────────────────────────────────────────────────
    if method == "PUT" and path.endswith("/members/me"):
        # Only the field NAMES are logged, never the submitted values.
        fields = [k for k in d]
        if fields:
            return "[dim]updated[/dim] · " + ", ".join(fields)
        return None

    # ── Member: onboarding ────────────────────────────────────────────────────
    if "/members/onboarding/contract" in path:
        signer = d.get("signer_name", "")
        if status < 400:
            return f"[green]contract signed[/green] · {signer}"
        return None

    if method == "PUT" and "/members/onboarding" in path:
        if d.get("complete_onboarding"):
            return "[dim]onboarding complete[/dim] → [yellow]pending_contract[/yellow]"
        return None

    # ── Member: bookings ──────────────────────────────────────────────────────
    if method == "POST" and path.endswith("/members/bookings"):
        svc = d.get("service_type", "")
        notes = d.get("notes", "")
        label = _service_label(svc)
        # Notes are clipped to 50 chars to keep the log line readable.
        parts = [label] + ([notes[:50]] if notes else [])
        return "[dim]booking request[/dim] · " + " · ".join(p for p in parts if p)

    # ── Admin: create member ──────────────────────────────────────────────────
    if (
        method == "POST"
        and "/admin/members" in path
        # Exclude sub-resource POSTs so they don't masquerade as member creation.
        and not any(seg in path for seg in ("/activate", "/walks", "/bookings", "/messages"))
    ):
        first = d.get("first_name", "")
        last = d.get("last_name", "")
        em2 = d.get("email", "")
        name = f"{first} {last}".strip()
        parts = [n for n in (name, em2) if n]
        return "[dim]new member[/dim] · " + " · ".join(parts)

    if method == "POST" and "/admin/members/" in path and path.endswith("/activate"):
        return "status → [green]active ✓[/green]"

    if method == "PUT" and "/admin/members/" in path:
        s = d.get("member_status")
        if s:
            return f"status → [cyan]{s}[/cyan]"
        return None

    # ── Admin: walks ──────────────────────────────────────────────────────────
    if method == "POST" and "/admin/walks" in path:
        svc = _service_label(d.get("service_type", ""))
        dur = d.get("duration_minutes", "")
        parts = [svc] + ([f"{dur} min"] if dur else [])
        return "[dim]walk recorded[/dim] · " + " · ".join(p for p in parts if p)

    # ── Admin: messages ───────────────────────────────────────────────────────
    if method == "POST" and "/admin/messages" in path:
        subject = d.get("subject", "")
        return f"[dim]message sent[/dim] · {subject}" if subject else "[dim]message sent[/dim]"

    # ── Admin: bookings ───────────────────────────────────────────────────────
    if method == "PUT" and "/admin/bookings/" in path:
        s = d.get("status")
        if s:
            _colour = {"confirmed": "green", "cancelled": "red", "completed": "cyan"}.get(s, "yellow")
            return f"status → [{_colour}]{s}[/{_colour}]"
        if d.get("admin_notes"):
            return "[dim]admin notes updated[/dim]"
        return None

    # ── Admin: notifications ──────────────────────────────────────────────────
    if method == "POST" and "/admin/notifications/run" in path:
        return "[dim]notification run triggered[/dim]"

    if method == "PUT" and "/admin/notifications/settings" in path:
        keys = list(d.keys())
        # Parses as ("…" + join) if keys else None — the + binds tighter.
        return "[dim]settings updated[/dim] · " + ", ".join(keys) if keys else None

    # ── Contact leads ─────────────────────────────────────────────────────────
    if "/contact" in path and method == "POST":
        name = d.get("full_name") or d.get("name", "")
        pet = d.get("pet_name", "")
        email_fallback = d.get("email", "")
        parts = [name or email_fallback] + ([f"dog: {pet}"] if pet else [])
        return "[dim]lead[/dim] · " + " · ".join(p for p in parts if p)

    # ── Generic 4xx hints ─────────────────────────────────────────────────────
    if status == 401:
        return "[red]✗[/red] unauthorized"
    if status == 403:
        return "[red]✗[/red] forbidden"
    if status == 422:
        return "[yellow]⚠[/yellow] validation error"
    if status == 429:
        return "[yellow]⚠[/yellow] rate limited"

    return None
|
||||
|
||||
|
||||
def _service_label(svc: str) -> str:
    """Translate a service_type key into its display label; unknown keys pass through unchanged."""
    labels = {"pack_walk": "Pack Walk", "1_1_walk": "1-1 Walk", "puppy_visit": "Puppy Visit"}
    return labels.get(svc, svc)
|
||||
|
||||
|
||||
# ── Middleware ────────────────────────────────────────────────────────────────
|
||||
|
||||
class RequestLogMiddleware(BaseHTTPMiddleware):
    """
    Logs every non-trivial HTTP request to the console via Rich.

    Body reads are cached by Starlette's Request.body() so downstream
    handlers always see the full body unchanged.
    """

    async def dispatch(self, request: Request, call_next):
        path = request.url.path
        method = request.method

        # Noise endpoints and CORS preflights are passed through unlogged.
        if path in _SKIP or method == "OPTIONS":
            return await call_next(request)

        # Read and cache body before handing to the route handler.
        raw = b""
        if method in _BODY_METHODS:
            raw = await request.body()  # Starlette caches in request._body

        data = _body(raw)

        # Time only the downstream handling, not our own formatting.
        t0 = time.perf_counter()
        response = await call_next(request)
        elapsed = (time.perf_counter() - t0) * 1_000  # milliseconds

        status = response.status_code
        ctx = _context(path, method, data, status)
        origin = _origin(request)

        # Fixed-width columns so consecutive log lines align vertically.
        method_w = f"{method:<7}"
        timing = f"{elapsed:>7.1f}ms"
        path_w = _shorten(path)

        method_styled = f"[{_METHOD_STYLE.get(method, 'white')}]{method_w}[/]"
        status_styled = f"[{_status_style(status)}]{status}[/]"
        timing_styled = f"[{_timing_style(elapsed)}]{timing}[/]"

        _console.print(
            f" {method_styled} {path_w} {status_styled} {timing_styled} {origin}"
        )
        if ctx:
            # Indented continuation line with the endpoint-specific context.
            _console.print(f" [dim]↳[/dim] {ctx}")

        return response
|
||||
@@ -0,0 +1,8 @@
|
||||
"""
|
||||
slowapi rate limiter setup.
|
||||
Import `limiter` here and attach it to the FastAPI app in main.py.
|
||||
"""
|
||||
from slowapi import Limiter
|
||||
from slowapi.util import get_remote_address
|
||||
|
||||
limiter = Limiter(key_func=get_remote_address, headers_enabled=True)
|
||||
@@ -0,0 +1,44 @@
|
||||
from app.models.base import Base
|
||||
from app.models.page import Page
|
||||
from app.models.post import BlogPost
|
||||
from app.models.settings import SiteSettings
|
||||
from app.models.user import User, RefreshToken
|
||||
from app.models.section import ContentSection
|
||||
from app.models.analytics import AnalyticsEvent
|
||||
from app.models.experiment import Experiment, ExperimentVariant, ExperimentEvent
|
||||
from app.models.member import (
|
||||
Member,
|
||||
MemberVerificationCode,
|
||||
MemberRefreshToken,
|
||||
MagicLinkToken,
|
||||
Walk,
|
||||
Booking,
|
||||
AdminMessage,
|
||||
MemberNotificationDispatch,
|
||||
)
|
||||
from app.models.contact_lead import ContactLead
|
||||
from app.models.audit import AuditLog
|
||||
|
||||
__all__ = [
|
||||
"Base",
|
||||
"Page",
|
||||
"BlogPost",
|
||||
"SiteSettings",
|
||||
"User",
|
||||
"RefreshToken",
|
||||
"ContentSection",
|
||||
"AnalyticsEvent",
|
||||
"Experiment",
|
||||
"ExperimentVariant",
|
||||
"ExperimentEvent",
|
||||
"Member",
|
||||
"MemberVerificationCode",
|
||||
"MemberRefreshToken",
|
||||
"MagicLinkToken",
|
||||
"Walk",
|
||||
"Booking",
|
||||
"AdminMessage",
|
||||
"MemberNotificationDispatch",
|
||||
"ContactLead",
|
||||
"AuditLog",
|
||||
]
|
||||
@@ -0,0 +1,29 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import String, DateTime, func, JSON
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from app.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class AnalyticsEvent(Base, UUIDMixin):
    """A single client-side analytics event stored for the summary dashboard."""

    __tablename__ = "analytics_events"

    # What happened and where on the site.
    event_type: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    page: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    element: Mapped[str | None] = mapped_column(String(255), nullable=True)
    # Free-form JSON payload. Attribute is named metadata_ with an explicit
    # "metadata" column name — presumably because `metadata` clashes with
    # SQLAlchemy's declarative attribute; confirm before renaming.
    metadata_: Mapped[dict | None] = mapped_column("metadata", JSON, nullable=True)
    session_id: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    # Client identity is stored hashed/truncated rather than as a raw IP.
    ip_hash: Mapped[str | None] = mapped_column(String(64), nullable=True)
    ip_partial: Mapped[str | None] = mapped_column(String(24), nullable=True)
    user_agent: Mapped[str | None] = mapped_column(String(512), nullable=True)
    browser: Mapped[str | None] = mapped_column(String(100), nullable=True)
    os_name: Mapped[str | None] = mapped_column(String(100), nullable=True)
    country: Mapped[str | None] = mapped_column(String(100), nullable=True)
    city: Mapped[str | None] = mapped_column(String(100), nullable=True)
    # Server-side timestamp; indexed for time-range queries.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
    )
|
||||
@@ -0,0 +1,69 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Index, String, Text, JSON, func
|
||||
from sqlalchemy import Uuid
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.models.base import Base, UUIDMixin
|
||||
|
||||
|
||||
class AuditLog(Base, UUIDMixin):
    """Immutable record of member activity and application errors.

    Rows are append-only; both foreign keys use ON DELETE SET NULL so history
    survives deletion of the related member or booking.
    """

    __tablename__ = "audit_logs"
    __table_args__ = (
        Index("ix_audit_logs_timestamp", "timestamp"),
        Index("ix_audit_logs_member_id", "member_id"),
        Index("ix_audit_logs_action_type", "action_type"),
        Index("ix_audit_logs_status", "status"),
    )

    # Event time, assigned by the database.
    timestamp: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    # Nullable FK — SET NULL if member is deleted so the log is preserved.
    member_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="SET NULL"),
        nullable=True,
    )
    # Denormalised for readability after member deletion.
    member_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # One of: login, logout, page_visit, booking_created, booking_cancelled,
    # profile_updated, onboarding_updated, contract_signed,
    # account_claimed, message_read, error
    action_type: Mapped[str] = mapped_column(String(64), nullable=False)

    # Identifies the page / feature area, e.g. "members/dashboard"
    area: Mapped[str] = mapped_column(String(255), nullable=False)

    # Human-readable one-liner
    description: Mapped[str] = mapped_column(String(500), nullable=False)

    # success | warning | error
    status: Mapped[str] = mapped_column(String(16), nullable=False, default="success")

    # Optional related booking — SET NULL if booking is deleted.
    booking_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("bookings.id", ondelete="SET NULL"),
        nullable=True,
    )

    # Error detail — populated for action_type='error' records.
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    error_detail: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Request metadata
    ip_address: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    user_agent: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)

    # Catch-all JSON for any extra context (e.g. booking service_type)
    extra: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)

    # Convenience link to the member; defined one-way (no back_populates).
    member: Mapped[Optional["Member"]] = relationship(  # type: ignore[name-defined]
        "Member", foreign_keys=[member_id]
    )
|
||||
@@ -0,0 +1,30 @@
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from sqlalchemy import DateTime, func, Uuid
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Declarative base shared by every ORM model in the application."""
    pass
|
||||
|
||||
|
||||
class TimestampMixin:
    """Adds database-maintained ``created_at`` / ``updated_at`` columns."""

    # Set once by the database at insert time.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    # Refreshed on every UPDATE via ``onupdate`` with a DB-side now().
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
|
||||
|
||||
|
||||
class UUIDMixin:
    """Adds a client-generated UUID primary key column ``id``."""

    # ``default=uuid.uuid4`` generates the key in Python before insert.
    id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
|
||||
@@ -0,0 +1,34 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, JSON, String, Text, func
|
||||
from sqlalchemy import Uuid
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from app.models.base import Base, UUIDMixin, TimestampMixin
|
||||
|
||||
|
||||
class ContactLead(Base, UUIDMixin, TimestampMixin):
    """Inbound enquiry captured from the public site, tracked into the invite flow."""

    __tablename__ = "contact_leads"

    full_name: Mapped[str] = mapped_column(String(255), nullable=False)
    email: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    phone: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    requested_services: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    pet_name: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    pet_breed: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    suburb: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    service_area_status: Mapped[Optional[str]] = mapped_column(String(32), nullable=True)
    message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Where the lead originated; defaults to the public contact form.
    source: Mapped[str] = mapped_column(String(50), nullable=False, default="contact_form")
    status: Mapped[str] = mapped_column(String(32), nullable=False, default="invite", index=True)
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Attribute renamed to avoid Declarative's reserved ``metadata``; the DB
    # column is still named "metadata".
    metadata_json: Mapped[Optional[dict]] = mapped_column("metadata", JSON, nullable=True)
    invited_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    # Member created from this lead; preserved (SET NULL) if that member is deleted.
    invited_member_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
|
||||
@@ -0,0 +1,68 @@
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import Boolean, DateTime, ForeignKey, Index, Integer, JSON, Numeric, String, UniqueConstraint, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.models.base import Base, TimestampMixin, UUIDMixin
|
||||
|
||||
|
||||
class Experiment(Base, UUIDMixin, TimestampMixin):
    """A/B experiment definition: identity, gating flag, routes and variants."""

    __tablename__ = "experiments"

    experiment_key: Mapped[str] = mapped_column(String(64), nullable=False, unique=True, index=True)
    # Cookie used to pin a visitor to an assigned variant; unique per experiment.
    cookie_name: Mapped[str] = mapped_column(String(96), nullable=False, unique=True)
    name: Mapped[str] = mapped_column(String(120), nullable=False)
    description: Mapped[str | None] = mapped_column(String(512), nullable=True)
    enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, index=True)
    # JSON list of path strings on which this experiment may run.
    eligible_routes: Mapped[list[str]] = mapped_column(JSON, nullable=False, default=list)

    # Variants are owned by the experiment and removed with it.
    variants: Mapped[list["ExperimentVariant"]] = relationship(
        back_populates="experiment",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="ExperimentVariant.variant_key",
    )
|
||||
|
||||
|
||||
class ExperimentVariant(Base, UUIDMixin, TimestampMixin):
    """One arm of an experiment with its traffic allocation weight."""

    __tablename__ = "experiment_variants"
    __table_args__ = (
        UniqueConstraint("experiment_id", "variant_key", name="uq_experiment_variants_experiment_variant"),
    )

    # NOTE(review): annotated ``Mapped[str]`` while ``experiments.id`` is a
    # UUID column — confirm the derived column type matches the FK target.
    experiment_id: Mapped[str] = mapped_column(
        ForeignKey("experiments.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    variant_key: Mapped[str] = mapped_column(String(64), nullable=False)
    label: Mapped[str] = mapped_column(String(120), nullable=False)
    # Integer traffic-share weight for this arm.
    allocation: Mapped[int] = mapped_column(Integer, nullable=False)
    is_control: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)

    experiment: Mapped[Experiment] = relationship(back_populates="variants")
|
||||
|
||||
|
||||
class ExperimentEvent(Base, UUIDMixin):
    """Append-only log of experiment exposures and conversions.

    Keyed by string identifiers (not FKs) so events remain queryable even
    after the experiment/variant rows are deleted.
    """

    __tablename__ = "experiment_events"
    __table_args__ = (
        Index("ix_experiment_events_experiment_variant_created_at", "experiment_key", "variant_key", "created_at"),
        Index("ix_experiment_events_session_created_at", "session_id", "created_at"),
    )

    experiment_key: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    variant_key: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    session_id: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
    user_id: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True)
    path: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    event_type: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    # Monetary value attached to a conversion event, when applicable.
    conversion_value: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True)
    # Trailing underscore avoids Declarative's reserved ``metadata`` attribute.
    metadata_: Mapped[dict | None] = mapped_column("metadata", JSON, nullable=True)
    # Timestamp assigned by the database via server_default.
    # Fix: the previous Python-side ``default=datetime.utcnow`` was removed —
    # it produced *naive* datetimes for a timezone-aware column and shadowed
    # the server default, yielding inconsistent stored values.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        index=True,
        server_default=func.now(),
    )
|
||||
@@ -0,0 +1,192 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from sqlalchemy import String, Boolean, DateTime, ForeignKey, Text, JSON, func, UniqueConstraint
|
||||
from sqlalchemy import Uuid
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from app.models.base import Base, UUIDMixin, TimestampMixin
|
||||
|
||||
|
||||
class Member(Base, UUIDMixin, TimestampMixin):
    """A client account moving through invite → claim → onboarding → active."""

    __tablename__ = "members"

    # Identity / contact details.
    email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    # Null until the member claims the account and sets a password.
    hashed_password: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    first_name: Mapped[str] = mapped_column(String(100), nullable=False)
    last_name: Mapped[str] = mapped_column(String(100), nullable=False)
    phone: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    address: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    emergency_contact: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    # Account lifecycle flags and milestone timestamps.
    is_claimed: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    notifications_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    member_status: Mapped[str] = mapped_column(String(32), default="invited", nullable=False, index=True)
    claimed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    onboarding_completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    contract_signed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    contract_signer_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    contract_version: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    activated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    onboarding_data: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)
    # Per-member price overrides keyed by service; empty dict when none.
    service_pricing_overrides: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
    # Nullable override; None presumably defers to the site-wide 2FA setting — confirm.
    force_two_factor: Mapped[Optional[bool]] = mapped_column(Boolean, nullable=True)

    # Child rows are owned by the member and deleted with it.
    verification_codes: Mapped[list["MemberVerificationCode"]] = relationship(
        "MemberVerificationCode", back_populates="member", cascade="all, delete-orphan"
    )
    refresh_tokens: Mapped[list["MemberRefreshToken"]] = relationship(
        "MemberRefreshToken", back_populates="member", cascade="all, delete-orphan"
    )
    walks: Mapped[list["Walk"]] = relationship(
        "Walk", back_populates="member", cascade="all, delete-orphan"
    )
    bookings: Mapped[list["Booking"]] = relationship(
        "Booking", back_populates="member", cascade="all, delete-orphan"
    )
    messages: Mapped[list["AdminMessage"]] = relationship(
        "AdminMessage", back_populates="member", cascade="all, delete-orphan"
    )
    notification_dispatches: Mapped[list["MemberNotificationDispatch"]] = relationship(
        "MemberNotificationDispatch", back_populates="member", cascade="all, delete-orphan"
    )
    magic_link_tokens: Mapped[list["MagicLinkToken"]] = relationship(
        "MagicLinkToken", back_populates="member", cascade="all, delete-orphan"
    )
|
||||
|
||||
|
||||
class MemberVerificationCode(Base, UUIDMixin):
    """Expiring one-time code for account claim or login 2FA, stored hashed."""

    __tablename__ = "member_verification_codes"

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Only a hash of the code is persisted, never the plaintext.
    code_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    purpose: Mapped[str] = mapped_column(String(20), nullable=False)  # "claim" | "login_2fa"
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Set when the code is consumed.
    used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )

    member: Mapped["Member"] = relationship("Member", back_populates="verification_codes")
|
||||
|
||||
|
||||
class MemberRefreshToken(Base, UUIDMixin):
    """Hashed, revocable refresh token belonging to a member session."""

    __tablename__ = "member_refresh_tokens"

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Only a hash of the token is persisted, never the plaintext.
    token_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Explicit revocation flag, separate from expiry.
    revoked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )

    member: Mapped["Member"] = relationship("Member", back_populates="refresh_tokens")
|
||||
|
||||
|
||||
class Walk(Base, UUIDMixin, TimestampMixin):
    """A walk or visit recorded against a member."""

    __tablename__ = "walks"

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    walked_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    service_type: Mapped[str] = mapped_column(String(50), nullable=False)  # pack_walk | 1_1_walk | puppy_visit
    # Column type (Integer) derived from the annotation; defaults to an hour.
    duration_minutes: Mapped[int] = mapped_column(nullable=False, default=60)
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Free-text identifier of whoever logged the walk — presumably an admin; confirm.
    recorded_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    member: Mapped["Member"] = relationship("Member", back_populates="walks")
|
||||
|
||||
|
||||
class Booking(Base, UUIDMixin, TimestampMixin):
    """A member's service booking request and its admin-managed status."""

    __tablename__ = "bookings"

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    service_type: Mapped[str] = mapped_column(String(50), nullable=False)
    requested_date: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending")  # pending | confirmed | cancelled | completed
    # Member-visible notes vs. internal admin notes.
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    admin_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    member: Mapped["Member"] = relationship("Member", back_populates="bookings")
|
||||
|
||||
|
||||
class AdminMessage(Base, UUIDMixin, TimestampMixin):
    """A message between admin and a member, with read/soft-delete tracking."""

    __tablename__ = "admin_messages"

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    subject: Mapped[str] = mapped_column(String(255), nullable=False)
    body: Mapped[str] = mapped_column(Text, nullable=False)
    # Set when the member first reads the message.
    read_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    sent_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    # Soft delete — row is kept, timestamp marks removal.
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    # "inbound" = admin → member, "outbound" = member reply
    direction: Mapped[str] = mapped_column(String(16), nullable=False, default="inbound")
    # Self-referencing thread link; SET NULL keeps replies if the parent goes away.
    reply_to_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("admin_messages.id", ondelete="SET NULL"),
        nullable=True,
    )

    member: Mapped["Member"] = relationship("Member", back_populates="messages")
|
||||
|
||||
|
||||
class MagicLinkToken(Base, UUIDMixin):
    """Expiring login-link token stored only as a hash; ``used_at`` marks consumption."""

    __tablename__ = "member_magic_link_tokens"

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Unique so a given token can be looked up directly by its hash.
    token_hash: Mapped[str] = mapped_column(String(255), nullable=False, unique=True, index=True)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )

    member: Mapped["Member"] = relationship("Member", back_populates="magic_link_tokens")
|
||||
|
||||
|
||||
class MemberNotificationDispatch(Base, UUIDMixin, TimestampMixin):
    """Ledger of notifications sent to a member.

    The (member_id, dispatch_key) uniqueness constraint prevents sending the
    same notification twice.
    """

    __tablename__ = "member_notification_dispatches"
    __table_args__ = (
        UniqueConstraint("member_id", "dispatch_key", name="uq_member_notification_dispatches_member_key"),
    )

    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    notification_type: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    # Idempotency key identifying one concrete dispatch occasion.
    dispatch_key: Mapped[str] = mapped_column(String(255), nullable=False)
    # Attribute renamed to avoid Declarative's reserved ``metadata``; DB column is "metadata".
    metadata_json: Mapped[Optional[dict]] = mapped_column("metadata", JSON, nullable=True)

    member: Mapped["Member"] = relationship("Member", back_populates="notification_dispatches")
|
||||
@@ -0,0 +1,19 @@
|
||||
from sqlalchemy import String, Text, Boolean, Index
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from app.models.base import Base, UUIDMixin, TimestampMixin
|
||||
|
||||
|
||||
class Page(Base, UUIDMixin, TimestampMixin):
    """CMS-managed static page with SEO metadata and a publish flag."""

    __tablename__ = "pages"

    title: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    body: Mapped[str] = mapped_column(Text, nullable=False, default="")
    meta_title: Mapped[str | None] = mapped_column(String(255), nullable=True)
    meta_description: Mapped[str | None] = mapped_column(String(500), nullable=True)
    og_image_url: Mapped[str | None] = mapped_column(String(2048), nullable=True)
    published: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # NOTE(review): ``unique=True`` on slug already yields a unique index on
    # most backends, so this plain index may be redundant — confirm.
    __table_args__ = (
        Index("ix_pages_slug", "slug"),
    )
|
||||
@@ -0,0 +1,24 @@
|
||||
from typing import List
|
||||
from sqlalchemy import String, Text, Boolean, Index, JSON
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.dialects.postgresql import ARRAY
|
||||
from sqlalchemy import text
|
||||
from app.models.base import Base, UUIDMixin, TimestampMixin
|
||||
|
||||
|
||||
class BlogPost(Base, UUIDMixin, TimestampMixin):
    """Blog article with tags, SEO fields and a publish flag."""

    __tablename__ = "blog_posts"

    title: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    excerpt: Mapped[str | None] = mapped_column(Text, nullable=True)
    body: Mapped[str] = mapped_column(Text, nullable=False, default="")
    author: Mapped[str | None] = mapped_column(String(255), nullable=True)
    featured_image_url: Mapped[str | None] = mapped_column(String(2048), nullable=True)
    # Use JSON for broader DB compatibility; PostgreSQL ARRAY is handled via type override in migration
    tags: Mapped[list] = mapped_column(JSON, nullable=False, default=list)
    published: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)

    # NOTE(review): ``unique=True`` on slug already yields a unique index on
    # most backends, so this plain index may be redundant — confirm.
    __table_args__ = (
        Index("ix_blog_posts_slug", "slug"),
    )
|
||||
@@ -0,0 +1,20 @@
|
||||
from datetime import datetime, timezone
|
||||
from sqlalchemy import Text, DateTime, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy import JSON
|
||||
|
||||
from app.models.base import Base
|
||||
|
||||
|
||||
class ContentSection(Base):
    """Keyed JSON blob of editable site content (JSONB on PostgreSQL)."""

    __tablename__ = "content_sections"

    # Natural primary key: the section identifier string.
    key: Mapped[str] = mapped_column(Text, primary_key=True)
    # Generic JSON everywhere, upgraded to JSONB on PostgreSQL.
    data: Mapped[dict] = mapped_column(JSON().with_variant(JSONB, "postgresql"), nullable=False)
    # Maintained automatically on insert and on every update.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
|
||||
@@ -0,0 +1,27 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import String, Text, JSON, Boolean, Integer, DateTime
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from app.models.base import Base, UUIDMixin, TimestampMixin
|
||||
|
||||
|
||||
class SiteSettings(Base, UUIDMixin, TimestampMixin):
    """Site-wide configuration: branding, notification schedule and feature flags."""

    __tablename__ = "site_settings"

    # Branding / presentation.
    site_name: Mapped[str] = mapped_column(String(255), nullable=False, default="")
    tagline: Mapped[str | None] = mapped_column(String(500), nullable=True)
    logo_url: Mapped[str | None] = mapped_column(String(2048), nullable=True)
    footer_text: Mapped[str | None] = mapped_column(Text, nullable=True)
    social_links: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)

    # Notification scheduling toggles.
    automatic_member_notifications_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    nz_public_holiday_notifications_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    invoice_reminder_notifications_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    # Day-of-week number used for invoice reminders; default 1 — confirm which
    # weekday convention (ISO vs zero-based) the scheduler expects.
    invoice_day_of_week: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
    admin_notifications_cleared_before: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    # Feature flags for member-facing functionality.
    bookings_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    walks_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    messages_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    two_factor_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    audit_history_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    experiments_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)

    # Default prices per service, overridable per member.
    service_pricing: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
|
||||
@@ -0,0 +1,39 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Boolean, DateTime, ForeignKey, func
|
||||
from sqlalchemy import Uuid
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from app.models.base import Base, UUIDMixin, TimestampMixin
|
||||
|
||||
|
||||
class User(Base, UUIDMixin, TimestampMixin):
    """Authenticated application user (separate from the public Member model)."""

    __tablename__ = "users"

    email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    hashed_password: Mapped[str] = mapped_column(String(255), nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)

    # Refresh tokens are owned by the user and deleted with it.
    refresh_tokens: Mapped[list["RefreshToken"]] = relationship(
        "RefreshToken", back_populates="user", cascade="all, delete-orphan"
    )
|
||||
|
||||
|
||||
class RefreshToken(Base, UUIDMixin):
    """Hashed, revocable refresh token belonging to a ``User``."""

    __tablename__ = "refresh_tokens"

    user_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Only a hash of the token is persisted, never the plaintext.
    token_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Explicit revocation flag, separate from expiry.
    revoked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )

    user: Mapped["User"] = relationship("User", back_populates="refresh_tokens")
|
||||
@@ -0,0 +1,202 @@
|
||||
import hashlib
|
||||
import secrets
|
||||
|
||||
import httpx
|
||||
import user_agents
|
||||
from fastapi import APIRouter, Depends, Request, Response
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.auth.deps import get_current_user
|
||||
from app.middleware.rate_limit import limiter
|
||||
from app.schemas.analytics import AnalyticsSummary, BookingOperationsSummary, EventCreate
|
||||
from app.services.analytics import get_booking_operations_summary, get_summary, record_event
|
||||
|
||||
router = APIRouter(tags=["Analytics"])
|
||||
ANON_COOKIE_NAME = "__gw_anon"
|
||||
ANON_COOKIE_MAX_AGE = 60 * 60 * 24 * 365
|
||||
CLIENT_METADATA_KEYS = {
|
||||
"area",
|
||||
"channel",
|
||||
"destination",
|
||||
"menu",
|
||||
"plan",
|
||||
"popular",
|
||||
"price",
|
||||
"unit",
|
||||
"variant",
|
||||
}
|
||||
|
||||
_PRIVATE_PREFIXES = ("127.", "10.", "172.16.", "172.17.", "172.18.", "172.19.",
|
||||
"172.20.", "172.21.", "172.22.", "172.23.", "172.24.", "172.25.",
|
||||
"172.26.", "172.27.", "172.28.", "172.29.", "172.30.", "172.31.",
|
||||
"192.168.", "::1", "localhost")
|
||||
|
||||
|
||||
def _mask_ip(ip: str) -> str:
|
||||
"""Return a privacy-safe partial IP: last octet replaced with 'x'."""
|
||||
if ":" in ip: # IPv6 — keep first 4 groups
|
||||
parts = ip.split(":")
|
||||
return ":".join(parts[:4]) + ":x"
|
||||
parts = ip.split(".")
|
||||
if len(parts) == 4:
|
||||
return f"{parts[0]}.{parts[1]}.{parts[2]}.x"
|
||||
return ip
|
||||
|
||||
|
||||
def _get_client_ip(request: Request) -> str | None:
    """Best-effort client IP: proxy headers first, then the raw socket peer."""
    forwarded_for = request.headers.get("x-forwarded-for")
    if forwarded_for:
        candidate = forwarded_for.split(",")[0].strip()
        if candidate:
            return candidate

    if real_ip := request.headers.get("x-real-ip"):
        return real_ip.strip()

    if request.client:
        return request.client.host
    return None
|
||||
|
||||
|
||||
def _should_secure_cookie(request: Request) -> bool:
    """Mark cookies Secure only over HTTPS, so plain-HTTP localhost dev keeps working."""
    scheme = request.url.scheme
    return scheme == "https"
|
||||
|
||||
|
||||
def _sanitize_client_metadata(metadata: dict | None) -> dict | None:
    """Whitelist-filter browser-supplied metadata down to flat scalar labels.

    Only keys in CLIENT_METADATA_KEYS survive; strings are capped at 120
    characters; bools and numbers pass through unchanged; every other value
    type is dropped. Returns None when nothing usable remains.
    """
    if not metadata:
        return None

    sanitized: dict[str, str | int | float | bool] = {}
    for key, value in metadata.items():
        if not (isinstance(key, str) and key in CLIENT_METADATA_KEYS):
            continue
        if isinstance(value, str):
            sanitized[key] = value[:120]
        elif isinstance(value, (bool, int, float)):
            sanitized[key] = value

    return sanitized or None
|
||||
|
||||
|
||||
def _get_or_create_session_id(request: Request, response: Response, payload_session_id: str | None) -> str:
    """Resolve the anonymous session id, minting and (re)issuing the cookie as needed.

    Preference order: existing cookie, legacy payload-supplied id, freshly
    generated token. The cookie is set whenever the resolved id differs from
    what the client sent.
    """
    from_cookie = request.cookies.get(ANON_COOKIE_NAME)
    resolved = from_cookie or payload_session_id or secrets.token_urlsafe(24)

    if resolved != from_cookie:
        response.set_cookie(
            key=ANON_COOKIE_NAME,
            value=resolved,
            max_age=ANON_COOKIE_MAX_AGE,
            httponly=True,
            samesite="lax",
            secure=_should_secure_cookie(request),
            path="/",
        )

    return resolved
|
||||
|
||||
|
||||
def _parse_ua(ua_string: str) -> tuple[str | None, str | None]:
    """Condense a User-Agent header into short (browser, os_name) labels.

    Browser is "Family Major" when a version is known; OS is "Family Version".
    Unknown ("Other") or empty families come back as None; both labels are
    truncated to 100 characters.
    """
    if not ua_string:
        return None, None

    parsed = user_agents.parse(ua_string)

    browser = parsed.browser.family
    if browser and browser != "Other" and parsed.browser.version_string:
        major_version = parsed.browser.version_string.split(".")[0]
        browser = f"{browser} {major_version}"

    os_name = parsed.os.family
    if os_name and os_name != "Other" and parsed.os.version_string:
        os_name = f"{os_name} {parsed.os.version_string}"

    def _clean(label: str | None) -> str | None:
        # Normalise empty / "Other" to None and enforce the storage cap.
        return None if not label or label == "Other" else label[:100]

    return _clean(browser), _clean(os_name)
|
||||
|
||||
|
||||
async def _geo_lookup(ip: str) -> tuple[str | None, str | None]:
    """Best-effort (country, city) lookup via ip-api.com.

    Private/loopback addresses are skipped, and every failure mode (network
    error, non-200, bad JSON, unsuccessful status) yields (None, None) —
    this helper never raises.
    """
    if not ip or any(ip.startswith(prefix) for prefix in _PRIVATE_PREFIXES):
        return None, None

    try:
        async with httpx.AsyncClient(timeout=2.0) as client:
            resp = await client.get(
                f"http://ip-api.com/json/{ip}",
                params={"fields": "status,country,city"},
            )
        if resp.status_code != 200:
            return None, None
        payload = resp.json()
    except Exception:
        # Geo enrichment is optional — swallow any failure.
        return None, None

    if payload.get("status") != "success":
        return None, None
    return payload.get("country"), payload.get("city")
|
||||
|
||||
|
||||
@router.post("/api/web/event", status_code=201)
@router.post("/api/analytics/event", status_code=201)
@limiter.limit("60/minute")
async def ingest_event(
    request: Request,
    response: Response,
    data: EventCreate,
    db: AsyncSession = Depends(get_db),
):
    """Record a telemetry event. Public — no auth required.

    Enriches the client payload with privacy-safe request metadata (hashed +
    truncated IP, parsed User-Agent, best-effort geo) and a server-owned
    anonymous session id, then persists it via the analytics service.
    Rate-limited to 60 requests per minute per client.
    """
    raw_ip = _get_client_ip(request)

    # Store only a truncated hash and a masked form of the IP, never the raw address.
    ip_hash = hashlib.sha256(raw_ip.encode()).hexdigest()[:16] if raw_ip else None
    ip_partial = _mask_ip(raw_ip) if raw_ip else None

    ua_string = request.headers.get("User-Agent", "")
    browser, os_name = _parse_ua(ua_string)

    # Best-effort geo lookup; returns (None, None) for private/unresolvable IPs.
    country, city = await _geo_lookup(raw_ip or "")
    # May set the anonymous-session cookie on the response as a side effect.
    session_id = _get_or_create_session_id(request, response, data.session_id)

    # Whitelist-filter client metadata, then attach the (trusted) referrer header.
    metadata = _sanitize_client_metadata(data.metadata) or {}
    referer = request.headers.get("referer")
    if referer:
        metadata["referrer"] = referer[:255]

    # Re-issue the payload with the server-resolved session id and metadata.
    normalized = data.model_copy(update={
        "session_id": session_id,
        "metadata": metadata or None,
    })

    await record_event(
        db, normalized,
        ip_hash=ip_hash,
        ip_partial=ip_partial,
        user_agent=ua_string[:512] if ua_string else None,
        browser=browser,
        os_name=os_name,
        country=country,
        city=city,
    )
    return {"ok": True}
|
||||
|
||||
|
||||
@router.get("/api/v1/analytics/summary", response_model=AnalyticsSummary)
async def analytics_summary(
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Aggregate site analytics for the dashboard. Requires an authenticated user."""
    summary = await get_summary(db)
    return summary
|
||||
|
||||
|
||||
@router.get("/api/v1/analytics/bookings-summary", response_model=BookingOperationsSummary)
async def booking_operations_summary(
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Booking-operations reporting; requires an authenticated user."""
    report = await get_booking_operations_summary(db)
    return report
|
||||
@@ -0,0 +1,140 @@
|
||||
"""
|
||||
Audit router.
|
||||
|
||||
Admin:
|
||||
GET /admin/audit — paginated, filtered audit log (admin-authenticated)
|
||||
|
||||
Member:
|
||||
POST /members/audit/page-visit — record a page navigation (member-authenticated)
|
||||
"""
|
||||
import math
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
|
||||
from sqlalchemy import func, or_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.deps import get_current_user
|
||||
from app.auth.member_deps import get_authenticated_member
|
||||
from app.database import get_db
|
||||
from app.middleware.rate_limit import limiter
|
||||
from app.models.audit import AuditLog
|
||||
from app.models.member import Member
|
||||
from app.models.user import User
|
||||
from app.schemas.audit import AuditLogPage, AuditLogResponse, PageVisitSchema
|
||||
from app.services.audit import log_audit
|
||||
from app.services.settings import get_feature_settings_snapshot
|
||||
|
||||
router = APIRouter(tags=["Audit"])
|
||||
|
||||
|
||||
async def _require_audit_history_enabled(db: AsyncSession) -> None:
    """Raise a 404 when the audit-history feature flag is switched off."""
    snapshot = await get_feature_settings_snapshot(db)
    if snapshot.audit_history_enabled:
        return
    raise HTTPException(status_code=404, detail="Audit history is currently disabled.")
|
||||
|
||||
|
||||
# ── Admin: query audit log ─────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/admin/audit", response_model=AuditLogPage)
async def admin_list_audit(
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
    member_id: Optional[uuid.UUID] = Query(None),
    action_type: Optional[str] = Query(None),
    status: Optional[str] = Query(None),
    area: Optional[str] = Query(None),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    search: Optional[str] = Query(None),
    sort_by: str = Query("timestamp"),
    sort_dir: str = Query("desc"),
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Paginated, filtered audit log (admin-authenticated).

    All filters are optional and AND-ed together; ``search`` is a fuzzy OR
    across email, description, area, action type and error message. An
    unknown ``sort_by`` silently falls back to ``timestamp``.

    Raises:
        404 when the audit-history feature flag is disabled.
    """
    await _require_audit_history_enabled(db)

    # Allow-list sortable columns so arbitrary attribute names can never
    # reach getattr()/ORDER BY.
    allowed_sort = {"timestamp", "action_type", "status", "area", "member_email"}
    if sort_by not in allowed_sort:
        sort_by = "timestamp"

    col = getattr(AuditLog, sort_by)
    order = col.desc() if sort_dir == "desc" else col.asc()

    conditions = []
    if member_id is not None:
        conditions.append(AuditLog.member_id == member_id)
    if action_type:
        conditions.append(AuditLog.action_type == action_type)
    if status:
        conditions.append(AuditLog.status == status)
    if area:
        conditions.append(AuditLog.area.ilike(f"%{area}%"))
    if date_from:
        conditions.append(AuditLog.timestamp >= date_from)
    if date_to:
        conditions.append(AuditLog.timestamp <= date_to)
    if search:
        term = f"%{search}%"
        conditions.append(
            or_(
                AuditLog.member_email.ilike(term),
                AuditLog.description.ilike(term),
                AuditLog.area.ilike(term),
                AuditLog.action_type.ilike(term),
                AuditLog.error_message.ilike(term),
            )
        )

    # Select.where() AND-s multiple criteria natively, so the previous
    # function-local `from sqlalchemy import and_` was unnecessary.
    base_q = select(AuditLog)
    if conditions:
        base_q = base_q.where(*conditions)

    # Count over a subquery so LIMIT/OFFSET never distort the total.
    count_result = await db.execute(select(func.count()).select_from(base_q.subquery()))
    total = count_result.scalar_one()

    offset = (page - 1) * page_size
    items_result = await db.execute(base_q.order_by(order).offset(offset).limit(page_size))
    items = items_result.scalars().all()

    return AuditLogPage(
        items=[AuditLogResponse.model_validate(i) for i in items],
        total=total,
        page=page,
        page_size=page_size,
        total_pages=max(1, math.ceil(total / page_size)),
    )
|
||||
|
||||
|
||||
# ── Member: page visit ─────────────────────────────────────────────────────────
|
||||
|
||||
@router.post("/members/audit/page-visit", status_code=204)
@limiter.limit("120/minute")
async def member_log_page_visit(
    request: Request,
    response: Response,
    data: PageVisitSchema,
    member: Member = Depends(get_authenticated_member),
    db: AsyncSession = Depends(get_db),
):
    """Record a member page navigation in the audit log.

    Always answers 204 with no body. When the audit-history feature flag is
    off the request is accepted but nothing is written.
    """
    feature_settings = await get_feature_settings_snapshot(db)
    if not feature_settings.audit_history_enabled:
        # Feature disabled: acknowledge silently, log nothing.
        return

    # Truncate to 255 chars — presumably the DB column width; confirm.
    path = data.path[:255] if data.path else "unknown"
    title = data.title or path

    await log_audit(
        db,
        member_id=member.id,
        member_email=member.email,
        action_type="page_visit",
        area=path,
        description=f"Visited: {title}",
        status="success",
        ip_address=request.client.host if request.client else None,
        user_agent=request.headers.get("User-Agent"),
    )
|
||||
@@ -0,0 +1,129 @@
|
||||
from datetime import datetime, timezone
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.jwt import (
|
||||
create_access_token,
|
||||
create_refresh_token,
|
||||
hash_refresh_token,
|
||||
get_token_expiry,
|
||||
)
|
||||
from app.auth.password import verify_password
|
||||
from app.database import get_db
|
||||
from app.middleware.rate_limit import limiter
|
||||
from app.models.user import User, RefreshToken
|
||||
from app.schemas.auth import LoginRequest, TokenResponse, RefreshRequest
|
||||
|
||||
router = APIRouter(prefix="/auth", tags=["Auth"])
|
||||
|
||||
|
||||
@router.post("/login", response_model=TokenResponse)
@limiter.limit("5/minute")
async def login(
    request: Request,
    response: Response,
    data: LoginRequest,
    db: AsyncSession = Depends(get_db),
):
    """
    Authenticate with email and password.
    Returns access token (15 min) and refresh token (7 days).

    Raises:
        401 for unknown email, wrong password, or an inactive account.
    """
    result = await db.execute(select(User).where(User.email == data.email))
    user = result.scalars().first()

    # One generic message for both "no such user" and "bad password" so the
    # endpoint does not reveal which emails are registered.
    if user is None or not verify_password(data.password, user.hashed_password):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid email or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Account is inactive",
        )

    access_token = create_access_token(data={"sub": str(user.id)})
    # Only the hash of the refresh token is stored; the plaintext goes to
    # the client exactly once in the response below.
    plaintext_refresh, refresh_hash = create_refresh_token()

    refresh_token_row = RefreshToken(
        user_id=user.id,
        token_hash=refresh_hash,
        expires_at=get_token_expiry(),
        revoked=False,
        created_at=datetime.now(timezone.utc),
    )
    db.add(refresh_token_row)
    await db.flush()

    return TokenResponse(
        access_token=access_token,
        refresh_token=plaintext_refresh,
        token_type="bearer",
    )
|
||||
|
||||
|
||||
@router.post("/refresh", response_model=TokenResponse)
@limiter.limit("5/minute")
async def refresh_tokens(
    request: Request,
    response: Response,
    data: RefreshRequest,
    db: AsyncSession = Depends(get_db),
):
    """
    Exchange a valid refresh token for a new token pair.
    The old refresh token is revoked atomically.

    Raises:
        401 when the token is unknown, revoked, expired, or its user is
        missing or inactive.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid or expired refresh token",
        headers={"WWW-Authenticate": "Bearer"},
    )

    now = datetime.now(timezone.utc)
    # Tokens are stored hashed; hash the presented value to look it up.
    token_hash = hash_refresh_token(data.refresh_token)
    result = await db.execute(
        select(RefreshToken).where(
            RefreshToken.token_hash == token_hash,
            RefreshToken.revoked == False,  # noqa: E712 — SQLAlchemy needs the comparison expression
            RefreshToken.expires_at > now,
        )
    )
    matched_row = result.scalars().first()

    if matched_row is None:
        raise credentials_exception

    # Revoke old token — rotation: each refresh token is single-use.
    matched_row.revoked = True

    # Load user
    result = await db.execute(select(User).where(User.id == matched_row.user_id))
    user = result.scalars().first()

    # The old token stays revoked even on this failure path, which is the
    # safe outcome for a deactivated/deleted account.
    if user is None or not user.is_active:
        raise credentials_exception

    # Issue new tokens
    access_token = create_access_token(data={"sub": str(user.id)})
    plaintext_refresh, refresh_hash = create_refresh_token()

    new_refresh_row = RefreshToken(
        user_id=user.id,
        token_hash=refresh_hash,
        expires_at=get_token_expiry(),
        revoked=False,
        created_at=now,
    )
    db.add(new_refresh_row)
    await db.flush()

    return TokenResponse(
        access_token=access_token,
        refresh_token=plaintext_refresh,
        token_type="bearer",
    )
|
||||
@@ -0,0 +1,167 @@
|
||||
import hashlib
|
||||
import secrets
|
||||
import uuid
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.deps import get_current_user
|
||||
from app.config import settings
|
||||
from app.database import get_db
|
||||
from app.middleware.rate_limit import limiter
|
||||
from app.models.contact_lead import ContactLead
|
||||
from app.models.member import Member, MagicLinkToken
|
||||
from app.models.user import User
|
||||
from app.schemas.contact import (
|
||||
ContactLeadCreate,
|
||||
ContactLeadInviteRequest,
|
||||
ContactLeadInviteResponse,
|
||||
ContactLeadResponse,
|
||||
ContactLeadUpdate,
|
||||
)
|
||||
from app.services.email import send_onboarding_invite
|
||||
|
||||
router = APIRouter(tags=["Contact Leads"])
|
||||
|
||||
|
||||
def _split_name(full_name: str) -> tuple[str, str]:
|
||||
parts = [part for part in full_name.strip().split() if part]
|
||||
if not parts:
|
||||
return "Goodwalk", "Client"
|
||||
if len(parts) == 1:
|
||||
return parts[0], "Client"
|
||||
return parts[0], " ".join(parts[1:])
|
||||
|
||||
|
||||
def _normalise_services(payload: ContactLeadCreate) -> str | None:
    """Collapse the requested services into one display string.

    Prefers the multi-select ``services`` list; falls back to the legacy
    single ``service`` field. Returns None when neither carries a value.
    """
    if payload.services:
        return ", ".join(payload.services)
    single = payload.service
    if not single:
        return None
    return single.strip() or None
|
||||
|
||||
|
||||
@router.post("/api/contact", response_model=ContactLeadResponse, status_code=201)
@limiter.limit("10/minute")
async def submit_contact_lead(
    request: Request,
    response: Response,
    data: ContactLeadCreate,
    db: AsyncSession = Depends(get_db),
):
    """Public contact-form endpoint: persist a lead and echo it back.

    Input is lightly normalised (trimmed, email lowercased); blank optional
    fields are stored as NULL rather than empty strings.
    """
    lead = ContactLead(
        full_name=data.name.strip(),
        email=data.email.strip().lower(),  # canonical form for later member matching
        phone=(data.phone or "").strip() or None,
        requested_services=_normalise_services(data),
        pet_name=(data.petName or "").strip() or None,
        pet_breed=(data.petBreed or "").strip() or None,
        suburb=(data.location or "").strip() or None,
        service_area_status=(data.serviceAreaStatus or "").strip() or None,
        message=(data.message or "").strip() or None,
        source=data.source,
        # NOTE(review): the invite flow elsewhere sets "invited" — confirm
        # "invite" is the intended initial status here and not a typo.
        status="invite",
        # Preserve the raw service fields exactly as the client sent them.
        metadata_json={
            "services": data.services,
            "service": data.service,
        },
    )
    db.add(lead)
    await db.flush()
    await db.refresh(lead)  # pick up DB-generated values (id, timestamps)
    return lead
|
||||
|
||||
|
||||
@router.get("/api/v1/admin/leads", response_model=list[ContactLeadResponse])
async def admin_list_leads(
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """All contact leads, newest first (admin only)."""
    rows = await db.execute(select(ContactLead).order_by(ContactLead.created_at.desc()))
    return rows.scalars().all()
|
||||
|
||||
|
||||
@router.put("/api/v1/admin/leads/{lead_id}", response_model=ContactLeadResponse)
async def admin_update_lead(
    lead_id: uuid.UUID,
    data: ContactLeadUpdate,
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Partially update a lead: only fields present in the body are changed.

    Raises:
        404 when the lead id is unknown.
    """
    rows = await db.execute(select(ContactLead).where(ContactLead.id == lead_id))
    lead = rows.scalars().first()
    if lead is None:
        raise HTTPException(status_code=404, detail="Lead not found.")

    # exclude_unset gives PATCH-like semantics: untouched fields survive.
    changes = data.model_dump(exclude_unset=True)
    for field_name, new_value in changes.items():
        setattr(lead, field_name, new_value)
    await db.flush()
    await db.refresh(lead)
    return lead
|
||||
|
||||
|
||||
@router.post("/api/v1/admin/leads/{lead_id}/invite", response_model=ContactLeadInviteResponse)
async def admin_invite_lead(
    lead_id: uuid.UUID,
    data: ContactLeadInviteRequest,
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Convert a contact lead into an invited member, optionally emailing a
    magic onboarding link.

    If a member with the lead's email already exists it is reused rather
    than duplicated. The lead is always stamped with the member id, the
    invite time, and status "invited".

    Raises:
        404 when the lead id is unknown.
    """
    result = await db.execute(select(ContactLead).where(ContactLead.id == lead_id))
    lead = result.scalars().first()
    if lead is None:
        raise HTTPException(status_code=404, detail="Lead not found.")

    existing_member_result = await db.execute(select(Member).where(Member.email == lead.email))
    member = existing_member_result.scalars().first()

    if member is None:
        first_name, last_name = _split_name(lead.full_name)
        member = Member(
            email=lead.email,
            first_name=first_name,
            last_name=last_name,
            phone=lead.phone,
            address=lead.suburb,
            # Carry the lead's intake answers into onboarding so the member
            # does not have to re-enter them.
            onboarding_data={
                "dog_name": lead.pet_name,
                "dog_breed": lead.pet_breed,
                "preferred_service": lead.requested_services,
                "lead_message": lead.message,
                "service_area_status": lead.service_area_status,
                "source": lead.source,
            },
            is_claimed=False,
            is_active=True,
            member_status="invited",
        )
        db.add(member)
        await db.flush()  # populate member.id before linking the lead

    lead.invited_member_id = member.id
    lead.invited_at = datetime.now(timezone.utc)
    lead.status = "invited"
    await db.flush()
    await db.refresh(lead)
    await db.refresh(member)

    if data.send_email:
        # Only the SHA-256 hash of the magic token is persisted; the
        # plaintext exists solely inside the emailed URL.
        plaintext_token = secrets.token_urlsafe(32)
        token_hash = hashlib.sha256(plaintext_token.encode()).hexdigest()
        magic_token = MagicLinkToken(
            member_id=member.id,
            token_hash=token_hash,
            expires_at=datetime.now(timezone.utc) + timedelta(days=7),
        )
        db.add(magic_token)
        await db.flush()
        magic_url = f"{settings.MEMBERS_URL.rstrip('/')}/join?token={plaintext_token}"
        await send_onboarding_invite(lead.email, member.first_name, magic_url)

    return ContactLeadInviteResponse(
        lead=ContactLeadResponse.model_validate(lead),
        member_id=member.id,
        member_status=member.member_status,
    )
|
||||
@@ -0,0 +1,187 @@
|
||||
import re
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.deps import get_current_user
|
||||
from app.database import get_db
|
||||
from app.middleware.rate_limit import limiter
|
||||
from app.schemas.experiments import (
|
||||
ExperimentConversionCreate,
|
||||
ExperimentDefinitionResponse,
|
||||
ExperimentDefinitionUpdate,
|
||||
ExperimentEventCreate,
|
||||
ExperimentImpressionCreate,
|
||||
ExperimentIngestResponse,
|
||||
ExperimentResult,
|
||||
)
|
||||
from app.services.experiments import (
|
||||
experiment_exists,
|
||||
get_experiment_definition,
|
||||
get_experiment_results,
|
||||
list_experiment_definitions,
|
||||
record_experiment_event,
|
||||
upsert_experiment_definition,
|
||||
)
|
||||
from app.services.settings import get_feature_settings_snapshot
|
||||
|
||||
router = APIRouter(tags=["Experiments"])

# Coarse heuristic: any of these keywords anywhere in the User-Agent.
BOT_UA_PATTERN = re.compile(r"(bot|crawler|spider|slurp|preview|headless)", re.IGNORECASE)


def _is_bot_request(request: Request) -> bool:
    """Flag crawler/preview/headless traffic from the User-Agent header."""
    ua = request.headers.get("user-agent", "")
    return BOT_UA_PATTERN.search(ua) is not None
|
||||
|
||||
|
||||
def _validate_experiment_assignment(experiment_key: str, variant_key: str) -> None:
    """Reject payloads naming an experiment/variant pair we do not run."""
    if experiment_exists(experiment_key, variant_key):
        return
    raise HTTPException(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        detail="unknown experiment or variant",
    )
|
||||
|
||||
|
||||
async def _experiments_enabled(db: AsyncSession) -> bool:
    """True when the experiments feature flag is switched on."""
    snapshot = await get_feature_settings_snapshot(db)
    return snapshot.experiments_enabled
|
||||
|
||||
|
||||
async def _require_experiments_enabled(db: AsyncSession) -> None:
    """404 the request when experiments are globally disabled."""
    enabled = await _experiments_enabled(db)
    if not enabled:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Experiments are currently disabled.")
|
||||
|
||||
|
||||
@router.get("/api/experiments", response_model=list[ExperimentDefinitionResponse])
async def get_experiments(db: AsyncSession = Depends(get_db)):
    """Public experiment definitions; empty list while the flag is off."""
    if await _experiments_enabled(db):
        return await list_experiment_definitions(db)
    return []
|
||||
|
||||
|
||||
@router.post("/api/experiments/impression", response_model=ExperimentIngestResponse, status_code=202)
@limiter.limit("30/minute")
async def ingest_experiment_impression(
    request: Request,
    response: Response,
    payload: ExperimentImpressionCreate,
    db: AsyncSession = Depends(get_db),
):
    """Ingest a variant impression.

    Unknown experiment/variant pairs get a 422; a disabled feature flag or
    bot traffic is acknowledged (202) but not recorded.
    """
    not_recorded = ExperimentIngestResponse(ok=True, accepted=False)

    if not await _experiments_enabled(db):
        return not_recorded

    _validate_experiment_assignment(payload.experiment_key, payload.variant_key)

    if _is_bot_request(request):
        return not_recorded

    await record_experiment_event(db, payload)
    return ExperimentIngestResponse(ok=True, accepted=True)
|
||||
|
||||
|
||||
@router.post("/api/experiments/event", response_model=ExperimentIngestResponse, status_code=202)
@limiter.limit("30/minute")
async def ingest_experiment_event(
    request: Request,
    response: Response,
    payload: ExperimentEventCreate,
    db: AsyncSession = Depends(get_db),
):
    """Ingest a generic experiment event.

    Unknown experiment/variant pairs get a 422; a disabled feature flag or
    bot traffic is acknowledged (202) but not recorded.
    """
    not_recorded = ExperimentIngestResponse(ok=True, accepted=False)

    if not await _experiments_enabled(db):
        return not_recorded

    _validate_experiment_assignment(payload.experiment_key, payload.variant_key)

    if _is_bot_request(request):
        return not_recorded

    await record_experiment_event(db, payload)
    return ExperimentIngestResponse(ok=True, accepted=True)
|
||||
|
||||
|
||||
@router.post("/api/experiments/conversion", response_model=ExperimentIngestResponse, status_code=202)
@limiter.limit("30/minute")
async def ingest_experiment_conversion(
    request: Request,
    response: Response,
    payload: ExperimentConversionCreate,
    db: AsyncSession = Depends(get_db),
):
    """Ingest a conversion event.

    Unknown experiment/variant pairs get a 422; a disabled feature flag or
    bot traffic is acknowledged (202) but not recorded.
    """
    not_recorded = ExperimentIngestResponse(ok=True, accepted=False)

    if not await _experiments_enabled(db):
        return not_recorded

    _validate_experiment_assignment(payload.experiment_key, payload.variant_key)

    if _is_bot_request(request):
        return not_recorded

    await record_experiment_event(db, payload)
    return ExperimentIngestResponse(ok=True, accepted=True)
|
||||
|
||||
|
||||
@router.get("/api/v1/experiments/results", response_model=list[ExperimentResult])
async def experiment_results(
    experiment_key: str | None = None,
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Aggregated results, optionally filtered by experiment key (auth required)."""
    await _require_experiments_enabled(db)
    results = await get_experiment_results(db, experiment_key)
    return results
|
||||
|
||||
|
||||
@router.get("/api/admin/experiments", response_model=list[ExperimentDefinitionResponse])
async def admin_list_experiments(
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """All experiment definitions for the admin UI (404 when disabled)."""
    await _require_experiments_enabled(db)
    definitions = await list_experiment_definitions(db)
    return definitions
|
||||
|
||||
|
||||
@router.get("/api/admin/experiments/{experiment_key}", response_model=ExperimentDefinitionResponse)
async def admin_get_experiment(
    experiment_key: str,
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Fetch a single experiment definition, serialized via the listing view.

    Raises:
        404 when experiments are disabled or the key is unknown.
    """
    await _require_experiments_enabled(db)
    experiment = await get_experiment_definition(db, experiment_key)
    if experiment is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Experiment not found")

    # Reuse the listing serialization so single-get and list share one shape.
    definitions = await list_experiment_definitions(db)
    match = next((item for item in definitions if item.experiment_key == experiment_key), None)
    if match is None:
        # Previously an `assert`, which is stripped under `python -O`; keep
        # the guard explicit so a definition/listing mismatch surfaces as a
        # clean 404 instead of an AttributeError/None response.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Experiment not found")
    return match
|
||||
|
||||
|
||||
@router.put("/api/admin/experiments/{experiment_key}", response_model=ExperimentDefinitionResponse)
async def admin_update_experiment(
    experiment_key: str,
    payload: ExperimentDefinitionUpdate,
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Create or update an experiment definition (admin only).

    Raises:
        404 when experiments are disabled; 400 when the service rejects the
        payload (surfaced from ValueError).
    """
    await _require_experiments_enabled(db)
    try:
        experiment = await upsert_experiment_definition(db, experiment_key, payload)
    except ValueError as exc:
        # Service-level validation failures become client errors.
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc

    # Build the response explicitly from the ORM row, including variants.
    return ExperimentDefinitionResponse(
        experiment_key=experiment.experiment_key,
        cookie_name=experiment.cookie_name,
        name=experiment.name,
        description=experiment.description,
        enabled=experiment.enabled,
        eligible_routes=experiment.eligible_routes,
        variants=[
            {
                "variant_key": variant.variant_key,
                "label": variant.label,
                "allocation": variant.allocation,
                "is_control": variant.is_control,
            }
            for variant in experiment.variants
        ],
    )
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,64 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.deps import get_current_user
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
from app.schemas.page import PageCreate, PageUpdate, PageResponse
|
||||
from app.services import pages as page_service
|
||||
from typing import List
|
||||
|
||||
router = APIRouter(prefix="/pages", tags=["Pages"])
|
||||
|
||||
|
||||
@router.get("", response_model=List[PageResponse])
async def list_pages(db: AsyncSession = Depends(get_db)):
    """Return every published page, serialized for the public API."""
    published = await page_service.get_published_pages(db)
    return [PageResponse.model_validate(page) for page in published]
|
||||
|
||||
|
||||
@router.get("/{slug}", response_model=PageResponse)
async def get_page(slug: str, db: AsyncSession = Depends(get_db)):
    """Fetch one published page by slug; 404 when absent or unpublished."""
    page = await page_service.get_page_by_slug(db, slug, published_only=True)
    if page is not None:
        return PageResponse.model_validate(page)
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Page '{slug}' not found")
|
||||
|
||||
|
||||
@router.post("", response_model=PageResponse, status_code=status.HTTP_201_CREATED)
async def create_page(
    data: PageCreate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Create a new page; requires an authenticated user."""
    created = await page_service.create_page(db, data)
    return PageResponse.model_validate(created)
|
||||
|
||||
|
||||
@router.put("/{slug}", response_model=PageResponse)
async def update_page(
    slug: str,
    data: PageUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Update an existing page by slug (auth required); 404 when unknown."""
    updated = await page_service.update_page(db, slug, data)
    if updated is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Page '{slug}' not found")
    return PageResponse.model_validate(updated)
|
||||
|
||||
|
||||
@router.delete("/{slug}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_page(
    slug: str,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Delete a page by slug (auth required); 404 when nothing was removed."""
    removed = await page_service.delete_page(db, slug)
    if not removed:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Page '{slug}' not found")
|
||||
@@ -0,0 +1,66 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.deps import get_current_user
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
from app.schemas.post import PostCreate, PostUpdate, PostResponse, PaginatedPostsResponse
|
||||
from app.services import posts as post_service
|
||||
|
||||
router = APIRouter(prefix="/posts", tags=["Posts"])
|
||||
|
||||
|
||||
@router.get("", response_model=PaginatedPostsResponse)
async def list_posts(
    page: int = Query(default=1, ge=1),
    per_page: int = Query(default=10, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
):
    """Paginated listing of published posts."""
    listing = await post_service.get_published_posts(db, page=page, per_page=per_page)
    return listing
|
||||
|
||||
|
||||
@router.get("/{slug}", response_model=PostResponse)
async def get_post(slug: str, db: AsyncSession = Depends(get_db)):
    """Fetch one published post by slug; 404 when absent or unpublished."""
    post = await post_service.get_post_by_slug(db, slug, published_only=True)
    if post is not None:
        return PostResponse.model_validate(post)
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Post '{slug}' not found")
|
||||
|
||||
|
||||
@router.post("", response_model=PostResponse, status_code=status.HTTP_201_CREATED)
async def create_post(
    data: PostCreate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Create a new blog post; requires an authenticated user."""
    created = await post_service.create_post(db, data)
    return PostResponse.model_validate(created)
|
||||
|
||||
|
||||
@router.put("/{slug}", response_model=PostResponse)
async def update_post(
    slug: str,
    data: PostUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Update an existing post by slug (auth required); 404 when unknown."""
    updated = await post_service.update_post(db, slug, data)
    if updated is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Post '{slug}' not found")
    return PostResponse.model_validate(updated)
|
||||
|
||||
|
||||
@router.delete("/{slug}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_post(
    slug: str,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Delete a post by slug (auth required); 404 when nothing was removed."""
    removed = await post_service.delete_post(db, slug)
    if not removed:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Post '{slug}' not found")
|
||||
@@ -0,0 +1,101 @@
|
||||
"""
|
||||
Legacy-compatible content section endpoints.
|
||||
Matches the URL shapes the SvelteKit frontend already calls,
|
||||
so no frontend changes are needed.
|
||||
"""
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import get_db
|
||||
from app.auth.deps import get_current_user
|
||||
from app.models.user import User
|
||||
from app.services.sections import get_section, upsert_section, list_sections
|
||||
|
||||
router = APIRouter(tags=["Sections"])
|
||||
|
||||
# Slug → content_sections key
# Public page slugs the frontend may request, mapped to the keys used by
# the content-sections store. Slugs not listed here 404 in page_by_slug.
PAGE_SLUG_MAP = {
    "home": "pages.home",
    "pack-walks": "pages.packWalks",
    "1-1-walks": "pages.oneOnOneWalks",
    "puppy-visits": "pages.puppyVisits",
    "pricing": "pages.pricing",
    "about": "pages.about",
    "contact": "pages.contact",
}
|
||||
|
||||
|
||||
# ── Public read endpoints ────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/api/site-settings")
async def site_settings(db: AsyncSession = Depends(get_db)):
    """Public site-settings blob; empty object when the section is unset."""
    section = await get_section(db, "siteSettings")
    return section if section else {}
|
||||
|
||||
|
||||
@router.get("/api/navigation")
async def navigation(db: AsyncSession = Depends(get_db)):
    """Public navigation config; empty item list when the section is unset."""
    section = await get_section(db, "navigation")
    return section if section else {"items": []}
|
||||
|
||||
|
||||
@router.get("/api/footer")
async def footer(db: AsyncSession = Depends(get_db)):
    """Public footer config; empty object when the section is unset."""
    section = await get_section(db, "footer")
    return section if section else {}
|
||||
|
||||
|
||||
@router.get("/api/testimonials")
async def testimonials(db: AsyncSession = Depends(get_db)):
    """Public testimonials; empty list only when the section is missing.

    Note: an explicit None check (not truthiness) so a stored empty value
    passes through unchanged.
    """
    section = await get_section(db, "testimonials")
    if section is None:
        return []
    return section
|
||||
|
||||
|
||||
@router.get("/api/onboarding")
async def onboarding(db: AsyncSession = Depends(get_db)):
    """Public onboarding config; empty object when the section is unset."""
    section = await get_section(db, "onboarding")
    return section if section else {}
|
||||
|
||||
|
||||
@router.get("/api/pages/{slug}")
async def page_by_slug(slug: str, db: AsyncSession = Depends(get_db)):
    """Resolve a legacy page slug to its content section.

    404 both for slugs outside PAGE_SLUG_MAP and for mapped slugs whose
    section has no stored content yet.
    """
    key = PAGE_SLUG_MAP.get(slug)
    if key is None:
        raise HTTPException(status_code=404, detail=f"Page '{slug}' not found")
    data = await get_section(db, key)
    if data is None:
        raise HTTPException(status_code=404, detail=f"Page '{slug}' not found")
    return data
|
||||
|
||||
|
||||
# ── Protected admin endpoints ────────────────────────────────────────────────
|
||||
|
||||
@router.get("/api/admin/sections")
async def admin_list_sections(
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    """List every stored content section (admin only)."""
    sections = await list_sections(db)
    return sections
|
||||
|
||||
|
||||
@router.get("/api/admin/sections/{key:path}")
async def admin_get_section(
    key: str,
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    """Fetch one content section by its (possibly slash-containing) key."""
    data = await get_section(db, key)
    if data is not None:
        return {"key": key, "data": data}
    raise HTTPException(status_code=404, detail="Section not found")
|
||||
|
||||
|
||||
@router.put("/api/admin/sections/{key:path}")
async def admin_update_section(
    key: str,
    body: dict,
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    """Create or replace a content section; echoes the stored key back."""
    stored = await upsert_section(db, key, body)
    return {"success": True, "key": stored.key}
|
||||
@@ -0,0 +1,202 @@
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.auth.deps import get_current_user
|
||||
from app.database import get_db
|
||||
from app.models.user import User
|
||||
from app.schemas.settings import (
|
||||
FeatureSettingsResponse,
|
||||
FeatureSettingsUpdate,
|
||||
PlannerWeatherResponse,
|
||||
ServicePricingSettingsResponse,
|
||||
ServicePricingSettingsUpdate,
|
||||
SiteSettingsResponse,
|
||||
SiteSettingsUpdate,
|
||||
)
|
||||
from app.services import settings as settings_service
|
||||
from app.services.settings import FeatureSettingsSchemaOutdatedError, ServicePricingSchemaOutdatedError
|
||||
|
||||
router = APIRouter(prefix="/settings", tags=["Settings"])

# Open-Meteo daily-forecast endpoint backing the planner weather overlay.
PLANNER_WEATHER_URL = "https://api.open-meteo.com/v1/forecast"
# How long a fetched snapshot is served before a refresh is attempted.
PLANNER_WEATHER_TTL = timedelta(hours=1)
# Process-local cache: "fetched_at" is the aware UTC time of the last
# successful fetch; "weather" maps ISO date -> {"code", "max", "min"}.
# datetime.min forces a fetch on first use.
PLANNER_WEATHER_CACHE = {
    "fetched_at": datetime.min.replace(tzinfo=UTC),
    "weather": {},
}
|
||||
|
||||
|
||||
async def _load_planner_weather_snapshot() -> tuple[datetime, dict[str, dict[str, int]]]:
    """Return (fetched_at, weather) from the module cache, refreshing when stale.

    ``weather`` maps an ISO date string to {"code": int weather code,
    "max": rounded high, "min": rounded low}. On a fetch failure a previously
    cached snapshot is served; with an empty cache the httpx error propagates.

    NOTE(review): the cache is a plain module-level dict with no lock, so
    concurrent stale requests may each refetch; last writer wins (benign).
    """
    fetched_at = PLANNER_WEATHER_CACHE["fetched_at"]
    cached_weather = PLANNER_WEATHER_CACHE["weather"]
    now = datetime.now(UTC)

    # Serve the cached snapshot while it is still within the TTL.
    if cached_weather and now - fetched_at < PLANNER_WEATHER_TTL:
        return fetched_at, cached_weather

    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(
                PLANNER_WEATHER_URL,
                params={
                    # Coordinates and timezone suggest Auckland, NZ — confirm if relocated.
                    "latitude": -36.85,
                    "longitude": 174.77,
                    "daily": "weathercode,temperature_2m_max,temperature_2m_min",
                    "timezone": "Pacific/Auckland",
                    "forecast_days": 16,
                    "past_days": 14,
                },
            )
            response.raise_for_status()
            payload = response.json()
    except httpx.HTTPError:
        # Degrade gracefully: keep serving the stale snapshot if one exists.
        if cached_weather:
            return fetched_at, cached_weather
        raise

    next_weather: dict[str, dict[str, int]] = {}
    daily = payload.get("daily") or {}
    dates = daily.get("time") or []
    codes = daily.get("weathercode") or []
    highs = daily.get("temperature_2m_max") or []
    lows = daily.get("temperature_2m_min") or []

    # Iterate defensively: skip any date lacking a matching code/high/low entry.
    for index, date_key in enumerate(dates):
        if index >= len(codes) or index >= len(highs) or index >= len(lows):
            continue
        next_weather[date_key] = {
            "code": int(codes[index]),
            "max": round(highs[index]),
            "min": round(lows[index]),
        }

    # Publish the new snapshot only after it was fully built.
    fetched_at = now
    PLANNER_WEATHER_CACHE["fetched_at"] = fetched_at
    PLANNER_WEATHER_CACHE["weather"] = next_weather
    return fetched_at, next_weather
|
||||
|
||||
|
||||
def _filter_planner_weather(
|
||||
weather: dict[str, dict[str, int]],
|
||||
start_date: str | None,
|
||||
end_date: str | None,
|
||||
) -> dict[str, dict[str, int]]:
|
||||
if not start_date and not end_date:
|
||||
return weather
|
||||
|
||||
filtered: dict[str, dict[str, int]] = {}
|
||||
for key, value in weather.items():
|
||||
if start_date and key < start_date:
|
||||
continue
|
||||
if end_date and key > end_date:
|
||||
continue
|
||||
filtered[key] = value
|
||||
return filtered
|
||||
|
||||
|
||||
@router.get("", response_model=SiteSettingsResponse)
|
||||
async def get_settings(db: AsyncSession = Depends(get_db)):
|
||||
"""Get site settings singleton."""
|
||||
row = await settings_service.get_settings(db)
|
||||
if row is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Site settings have not been configured yet. Run seed.py to initialise.",
|
||||
)
|
||||
return SiteSettingsResponse.model_validate(row)
|
||||
|
||||
|
||||
@router.put("", response_model=SiteSettingsResponse)
|
||||
async def update_settings(
|
||||
data: SiteSettingsUpdate,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
"""Create or update site settings singleton (auth required)."""
|
||||
row = await settings_service.upsert_settings(db, data)
|
||||
return SiteSettingsResponse.model_validate(row)
|
||||
|
||||
|
||||
@router.get("/features", response_model=FeatureSettingsResponse)
|
||||
async def get_feature_settings(db: AsyncSession = Depends(get_db)):
|
||||
snapshot = await settings_service.get_feature_settings_snapshot(db)
|
||||
return FeatureSettingsResponse(
|
||||
bookings_enabled=snapshot.bookings_enabled,
|
||||
walks_enabled=snapshot.walks_enabled,
|
||||
messages_enabled=snapshot.messages_enabled,
|
||||
two_factor_enabled=snapshot.two_factor_enabled,
|
||||
audit_history_enabled=snapshot.audit_history_enabled,
|
||||
experiments_enabled=snapshot.experiments_enabled,
|
||||
)
|
||||
|
||||
|
||||
@router.put("/features", response_model=FeatureSettingsResponse)
|
||||
async def update_feature_settings(
|
||||
data: FeatureSettingsUpdate,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
del current_user
|
||||
try:
|
||||
snapshot = await settings_service.update_feature_settings_snapshot(db, data)
|
||||
except FeatureSettingsSchemaOutdatedError as exc:
|
||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc))
|
||||
return FeatureSettingsResponse(
|
||||
bookings_enabled=snapshot.bookings_enabled,
|
||||
walks_enabled=snapshot.walks_enabled,
|
||||
messages_enabled=snapshot.messages_enabled,
|
||||
two_factor_enabled=snapshot.two_factor_enabled,
|
||||
audit_history_enabled=snapshot.audit_history_enabled,
|
||||
experiments_enabled=snapshot.experiments_enabled,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/pricing", response_model=ServicePricingSettingsResponse)
|
||||
async def get_service_pricing(db: AsyncSession = Depends(get_db)):
|
||||
snapshot = await settings_service.get_service_pricing_snapshot(db)
|
||||
return ServicePricingSettingsResponse(service_pricing=snapshot)
|
||||
|
||||
|
||||
@router.put("/pricing", response_model=ServicePricingSettingsResponse)
|
||||
async def update_service_pricing(
|
||||
data: ServicePricingSettingsUpdate,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
del current_user
|
||||
try:
|
||||
snapshot = await settings_service.update_service_pricing_snapshot(
|
||||
db,
|
||||
service_pricing=data.service_pricing,
|
||||
)
|
||||
except ServicePricingSchemaOutdatedError as exc:
|
||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc))
|
||||
return ServicePricingSettingsResponse(service_pricing=snapshot)
|
||||
|
||||
|
||||
@router.get("/planner-weather", response_model=PlannerWeatherResponse)
|
||||
async def get_planner_weather(
|
||||
start_date: str | None = Query(default=None),
|
||||
end_date: str | None = Query(default=None),
|
||||
current_user: User = Depends(get_current_user),
|
||||
):
|
||||
del current_user
|
||||
|
||||
for value, label in ((start_date, "start_date"), (end_date, "end_date")):
|
||||
if not value:
|
||||
continue
|
||||
try:
|
||||
datetime.strptime(value, "%Y-%m-%d")
|
||||
except ValueError as exc:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail=f"{label} must use YYYY-MM-DD format.",
|
||||
) from exc
|
||||
|
||||
fetched_at, weather = await _load_planner_weather_snapshot()
|
||||
return PlannerWeatherResponse(
|
||||
fetched_at=fetched_at,
|
||||
weather=_filter_planner_weather(weather, start_date, end_date),
|
||||
)
|
||||
@@ -0,0 +1,85 @@
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
|
||||
|
||||
class EventCreate(BaseModel):
    """Inbound analytics event posted by the public site."""

    event_type: str = Field(..., max_length=64)
    page: str = Field(..., max_length=255)
    element: Optional[str] = Field(None, max_length=255)
    # Free-form event payload; presumably sanitised server-side — TODO confirm.
    metadata: Optional[Dict[str, Any]] = None
    session_id: Optional[str] = Field(None, max_length=64)
|
||||
|
||||
|
||||
class EventResponse(BaseModel):
    """Analytics event as stored, including request-derived attributes."""

    id: uuid.UUID
    event_type: str
    page: str
    element: Optional[str]
    session_id: str
    # NOTE(review): name suggests a truncated/anonymised client IP — verify
    # against the ingestion code before relying on it.
    ip_partial: Optional[str]
    browser: Optional[str]
    os_name: Optional[str]
    country: Optional[str]
    city: Optional[str]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class DailyStat(BaseModel):
    """Event count for a single day (date as an ISO string)."""

    date: str
    count: int


class TopItem(BaseModel):
    """Generic label/count pair used by the 'top N' summary lists."""

    label: str
    count: int


class AnalyticsSummary(BaseModel):
    """Aggregated payload backing the analytics dashboard."""

    total_events_today: int
    total_events_yesterday: int
    page_views_today: int
    unique_sessions_today: int
    unique_sessions_total: int
    total_events_all_time: int
    events_by_type: List[TopItem]
    top_pages: List[TopItem]
    top_elements: List[TopItem]
    top_journeys: List[TopItem]
    top_browsers: List[TopItem]
    top_os: List[TopItem]
    top_countries: List[TopItem]
    events_last_7_days: List[DailyStat]
    recent_events: List[EventResponse]
|
||||
|
||||
|
||||
class BookingActivityStat(BaseModel):
    """Bookings made vs cancelled on a single day."""

    date: str
    booked: int
    cancellations: int


class BookingForwardLoadStat(BaseModel):
    """Upcoming booking load for one day, split into AM/PM slots."""

    date: str
    total: int
    am: int
    pm: int


class BookingCustomerVolume(BaseModel):
    """Booking count attributed to a single customer label."""

    label: str
    count: int


class BookingOperationsSummary(BaseModel):
    """Operational bookings overview for the admin dashboard."""

    active_bookings_total: int
    forward_load_total: int
    booked_last_30_days: int
    cancellations_last_30_days: int
    high_volume_customer_count: int
    forward_load_next_14_days: List[BookingForwardLoadStat]
    activity_last_30_days: List[BookingActivityStat]
    top_high_volume_customers: List[BookingCustomerVolume]
|
||||
@@ -0,0 +1,37 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class AuditLogResponse(BaseModel):
    """A single audit-log entry, including actor and error context."""

    id: uuid.UUID
    timestamp: datetime
    member_id: Optional[uuid.UUID]
    member_email: Optional[str]
    action_type: str
    area: str
    description: str
    status: str
    booking_id: Optional[uuid.UUID]
    error_message: Optional[str]
    error_detail: Optional[str]
    ip_address: Optional[str]
    user_agent: Optional[str]
    # Arbitrary extra context attached by the writer.
    extra: Optional[dict]

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class AuditLogPage(BaseModel):
    """One page of audit-log results with pagination metadata."""

    items: list[AuditLogResponse]
    total: int
    page: int
    page_size: int
    total_pages: int


class PageVisitSchema(BaseModel):
    """Minimal page-visit record: path plus optional document title."""

    path: str
    title: Optional[str] = None
|
||||
@@ -0,0 +1,28 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, EmailStr, ConfigDict
|
||||
|
||||
|
||||
class LoginRequest(BaseModel):
    """Credentials for admin password login.

    NOTE(review): ``EmailStr`` is imported in this module but ``email`` is a
    plain ``str`` — confirm whether stricter validation was intended.
    """

    email: str
    password: str


class TokenResponse(BaseModel):
    """Access/refresh token pair issued after successful authentication."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"


class RefreshRequest(BaseModel):
    """Body for exchanging a refresh token for a new token pair."""

    refresh_token: str


class UserResponse(BaseModel):
    """Public representation of an admin user."""

    id: uuid.UUID
    email: str
    is_active: bool
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = ConfigDict(from_attributes=True)
|
||||
@@ -0,0 +1,57 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ContactLeadCreate(BaseModel):
    """Inbound contact-form payload.

    Several field names are camelCase (``petName``, ``serviceAreaStatus``),
    presumably to match the public site's form payload verbatim — confirm
    against the frontend before renaming.
    """

    name: str = Field(min_length=1, max_length=255)
    email: str
    phone: Optional[str] = Field(default=None, max_length=50)
    service: Optional[str] = Field(default=None, max_length=255)
    services: list[str] = Field(default_factory=list)
    petName: Optional[str] = Field(default=None, max_length=100)
    petBreed: Optional[str] = Field(default=None, max_length=100)
    location: Optional[str] = Field(default=None, max_length=100)
    serviceAreaStatus: Optional[str] = Field(default=None, max_length=32)
    message: Optional[str] = Field(default=None, max_length=5000)
    source: str = Field(default="contact_form", max_length=50)
|
||||
|
||||
|
||||
class ContactLeadResponse(BaseModel):
    """Stored contact lead, including triage state and invite linkage."""

    id: uuid.UUID
    full_name: str
    email: str
    phone: Optional[str]
    requested_services: Optional[str]
    pet_name: Optional[str]
    pet_breed: Optional[str]
    suburb: Optional[str]
    service_area_status: Optional[str]
    message: Optional[str]
    source: str
    status: str
    notes: Optional[str]
    # Set once the lead has been converted to a member invite.
    invited_at: Optional[datetime]
    invited_member_id: Optional[uuid.UUID]
    metadata_json: Optional[dict[str, Any]]
    created_at: datetime
    updated_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class ContactLeadUpdate(BaseModel):
    """Admin triage update for a lead (status and/or notes)."""

    status: Optional[str] = Field(default=None, max_length=32)
    notes: Optional[str] = Field(default=None, max_length=5000)


class ContactLeadInviteRequest(BaseModel):
    """Options when converting a lead into a member invite."""

    send_email: bool = True


class ContactLeadInviteResponse(BaseModel):
    """Result of inviting a lead: the updated lead plus the created member."""

    lead: ContactLeadResponse
    member_id: uuid.UUID
    member_status: str
|
||||
@@ -0,0 +1,154 @@
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||
|
||||
KEY_PATTERN = r"^[a-z0-9_]{3,64}$"
|
||||
SESSION_PATTERN = r"^[A-Za-z0-9_-]{8,128}$"
|
||||
|
||||
|
||||
def _validate_metadata(metadata: dict[str, Any] | None) -> dict[str, Any] | None:
|
||||
if metadata is None:
|
||||
return None
|
||||
|
||||
if len(metadata) > 20:
|
||||
raise ValueError("metadata must contain at most 20 keys")
|
||||
|
||||
clean: dict[str, Any] = {}
|
||||
|
||||
for key, value in metadata.items():
|
||||
if not isinstance(key, str) or len(key) > 48:
|
||||
raise ValueError("metadata keys must be strings up to 48 characters")
|
||||
if isinstance(value, (str, int, float, bool)) or value is None:
|
||||
clean[key] = value
|
||||
continue
|
||||
raise ValueError("metadata values must be scalar JSON types")
|
||||
|
||||
return clean
|
||||
|
||||
|
||||
class ExperimentVariantDefinition(BaseModel):
    """One variant of an experiment with its traffic allocation (percent)."""

    variant_key: str = Field(..., pattern=KEY_PATTERN)
    label: str = Field(..., min_length=1, max_length=120)
    # Percentage share of traffic; totals are validated on the update schema.
    allocation: int = Field(..., ge=0, le=100)
    is_control: bool


class ExperimentDefinitionResponse(BaseModel):
    """Full experiment definition as served to the admin UI."""

    experiment_key: str = Field(..., pattern=KEY_PATTERN)
    cookie_name: str = Field(..., min_length=3, max_length=96)
    name: str
    description: str | None = None
    enabled: bool
    # Site-relative paths on which the experiment may run.
    eligible_routes: list[str]
    variants: list[ExperimentVariantDefinition]
|
||||
|
||||
|
||||
class ExperimentDefinitionUpdate(BaseModel):
    """Payload for replacing an experiment definition (admin write)."""

    cookie_name: str = Field(..., min_length=3, max_length=96)
    name: str = Field(..., min_length=1, max_length=120)
    description: str | None = Field(default=None, max_length=512)
    enabled: bool
    eligible_routes: list[str] = Field(default_factory=list, min_length=1)
    variants: list[ExperimentVariantDefinition] = Field(..., min_length=2)

    @field_validator("cookie_name")
    @classmethod
    def validate_cookie_name(cls, value: str) -> str:
        """Experiment cookies are namespaced under the 'exp_' prefix."""
        if value.startswith("exp_"):
            return value
        raise ValueError("cookie_name must start with 'exp_'")

    @field_validator("eligible_routes")
    @classmethod
    def validate_routes(cls, value: list[str]) -> list[str]:
        """Require absolute paths; normalise trailing slashes ('/' stays '/')."""
        for route in value:
            if not route.startswith("/"):
                raise ValueError("eligible routes must start with '/'")
        return [route.rstrip("/") or "/" for route in value]

    @field_validator("variants")
    @classmethod
    def validate_variants(cls, value: list[ExperimentVariantDefinition]) -> list[ExperimentVariantDefinition]:
        """Require exactly one control variant and a positive allocation total."""
        control_count = sum(1 for item in value if item.is_control)
        if control_count != 1:
            raise ValueError("exactly one control variant is required")
        total_allocation = sum(item.allocation for item in value)
        if total_allocation <= 0:
            raise ValueError("variant allocation total must be greater than zero")
        return value
|
||||
|
||||
|
||||
class ExperimentEventBase(BaseModel):
    """Common fields shared by all experiment telemetry payloads."""

    experiment_key: str = Field(..., pattern=KEY_PATTERN)
    variant_key: str = Field(..., pattern=KEY_PATTERN)
    session_id: str = Field(..., pattern=SESSION_PATTERN)
    user_id: str | None = Field(None, max_length=64)
    path: str = Field(..., min_length=1, max_length=255)
    timestamp: datetime
    metadata: dict[str, Any] | None = None

    @field_validator("path")
    @classmethod
    def validate_path(cls, value: str) -> str:
        # Paths are site-relative; reject anything not rooted at "/".
        if not value.startswith("/"):
            raise ValueError("path must start with '/'")
        return value

    @field_validator("metadata")
    @classmethod
    def validate_metadata(cls, value: dict[str, Any] | None) -> dict[str, Any] | None:
        # Delegates to the module-level scalar-only metadata validator.
        return _validate_metadata(value)
|
||||
|
||||
|
||||
class ExperimentImpressionCreate(ExperimentEventBase):
    """Impression event; the pattern pins event_name to the literal 'impression'."""

    event_name: str = Field(default="impression", pattern=r"^impression$")


class ExperimentEventCreate(ExperimentEventBase):
    """Interaction event restricted to the known funnel step names."""

    event_name: str = Field(..., pattern=r"^(cta_click|form_start|form_submit)$")


class ExperimentConversionCreate(ExperimentEventBase):
    """Conversion event with an optional monetary value."""

    event_name: str = Field(default="conversion", pattern=r"^conversion$")
    conversion_value: Decimal | None = Field(default=None, max_digits=12, decimal_places=2)
|
||||
|
||||
|
||||
class ExperimentEventResponse(BaseModel):
    """A stored experiment event as returned by the admin API."""

    id: UUID
    experiment_key: str
    variant_key: str
    session_id: str
    user_id: str | None = None
    path: str
    event_type: str
    conversion_value: Decimal | None = None
    metadata: dict[str, Any] | None = None
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = ConfigDict(from_attributes=True)


class ExperimentVariantResult(BaseModel):
    """Aggregated funnel counts and rates for one variant."""

    variant_key: str
    impressions: int
    cta_clicks: int
    form_starts: int
    form_submits: int
    conversions: int
    unique_sessions: int
    conversion_rate: float
    conversion_value_total: float


class ExperimentResult(BaseModel):
    """Per-variant results for one experiment at a point in time."""

    experiment_key: str
    generated_at: datetime
    variants: list[ExperimentVariantResult]


class ExperimentIngestResponse(BaseModel):
    """Ack for telemetry ingestion; 'accepted' may be False when dropped."""

    ok: bool
    accepted: bool
|
||||
@@ -0,0 +1,370 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, Any
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
# ── Magic link ─────────────────────────────────────────────────────────────────

class MagicLinkVerifySchema(BaseModel):
    """Body for redeeming a one-time magic-link token."""

    token: str


# ── Claim ──────────────────────────────────────────────────────────────────────

class ClaimRequestSchema(BaseModel):
    """Start an account claim: send a verification code to this email."""

    email: str


class ClaimCompleteSchema(BaseModel):
    """Finish an account claim with the emailed code and a new password."""

    email: str
    code: str
    password: str


class MemberClaimVerifyCodeSchema(BaseModel):
    """Claim completion where the email is implied by context (no email field)."""

    code: str
    password: str
|
||||
|
||||
|
||||
# ── Auth ───────────────────────────────────────────────────────────────────────

class MemberLoginSchema(BaseModel):
    """Member password-login credentials."""

    email: str
    password: str


class MemberLoginVerifySchema(BaseModel):
    """Second login step: the emailed two-factor code."""

    email: str
    code: str


class MemberTokenResponse(BaseModel):
    """Access/refresh token pair issued to a member."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"


class MemberRefreshSchema(BaseModel):
    """Body for exchanging a member refresh token."""

    refresh_token: str


class MemberLogoutSchema(BaseModel):
    """Logout body; the refresh token is optional (revoked when provided)."""

    refresh_token: Optional[str] = None
|
||||
|
||||
|
||||
# ── Profile ────────────────────────────────────────────────────────────────────

class MemberProfileResponse(BaseModel):
    """Member profile as shown to the member themselves."""

    id: uuid.UUID
    email: str
    first_name: str
    last_name: str
    phone: Optional[str]
    address: Optional[str]
    emergency_contact: Optional[str]
    notifications_enabled: bool
    is_claimed: bool
    member_status: str
    activated_at: Optional[datetime]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class MemberProfileUpdate(BaseModel):
    """Partial profile update; omitted fields are left unchanged."""

    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    notifications_enabled: Optional[bool] = None


class MemberOnboardingResponse(BaseModel):
    """Member record enriched with onboarding/contract progress."""

    id: uuid.UUID
    email: str
    first_name: str
    last_name: str
    phone: Optional[str]
    address: Optional[str]
    emergency_contact: Optional[str]
    notifications_enabled: bool
    # Free-form onboarding answers; shape defined by the frontend wizard.
    onboarding_data: Optional[Any]
    is_claimed: bool
    member_status: str
    claimed_at: Optional[datetime]
    onboarding_completed_at: Optional[datetime]
    contract_signed_at: Optional[datetime]
    contract_signer_name: Optional[str]
    contract_version: Optional[str]
    activated_at: Optional[datetime]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class MemberOnboardingUpdate(BaseModel):
    """Partial onboarding update; set complete_onboarding to finalise."""

    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    onboarding_data: Optional[Any] = None
    complete_onboarding: bool = False


class ContractSignSchema(BaseModel):
    """Contract signature submission."""

    signer_name: str
    agreed: bool
    contract_version: Optional[str] = None
|
||||
|
||||
|
||||
# ── Walks ──────────────────────────────────────────────────────────────────────

class WalkResponse(BaseModel):
    """A recorded walk as shown in the member's history."""

    id: uuid.UUID
    service_type: str
    walked_at: datetime
    duration_minutes: int
    notes: Optional[str]
    recorded_by: Optional[str]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
# ── Bookings ───────────────────────────────────────────────────────────────────

class BookingCreate(BaseModel):
    """Member booking request; day/date/timeslot are all optional preferences."""

    service_type: str
    requested_day: Optional[str] = None
    requested_date: Optional[datetime] = None
    requested_timeslot: Optional[str] = None
    notes: Optional[str] = None


class BookingResponse(BaseModel):
    """A booking as shown to the member (includes admin notes)."""

    id: uuid.UUID
    service_type: str
    requested_date: Optional[datetime]
    status: str
    notes: Optional[str]
    admin_notes: Optional[str]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class BookingSlotAvailabilityResponse(BaseModel):
    """Capacity snapshot for one timeslot on one day."""

    slot: str
    label: str
    booked: int
    capacity: int
    remaining: int
    is_available: bool


class BookingAvailabilityDayResponse(BaseModel):
    """All slot availabilities for a single day."""

    date: str
    label: str
    slots: list[BookingSlotAvailabilityResponse]


class BookingAvailabilityResponse(BaseModel):
    """Availability for the requested day plus suggested alternatives."""

    requested_date: str
    selected: BookingAvailabilityDayResponse
    alternatives: list[BookingAvailabilityDayResponse]
|
||||
|
||||
|
||||
# ── Messages ───────────────────────────────────────────────────────────────────

class MessageResponse(BaseModel):
    """A message in the member's inbox/outbox."""

    id: uuid.UUID
    subject: str
    body: str
    sent_by: Optional[str]
    read_at: Optional[datetime]
    created_at: datetime
    # "inbound" = sent to the member; presumably "outbound" for member replies — confirm.
    direction: str = "inbound"
    reply_to_id: Optional[uuid.UUID] = None

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class MemberReplySchema(BaseModel):
    """Body of a member's reply to an existing message."""

    body: str
|
||||
|
||||
|
||||
# ── Admin: Create Member ───────────────────────────────────────────────────────

class AdminCreateMember(BaseModel):
    """Admin payload for creating a (pre-claim) member record."""

    email: str
    first_name: str
    last_name: str
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    onboarding_data: Optional[Any] = None
    # Per-member pricing overrides; falls back to site defaults when None.
    service_pricing_overrides: Optional[Any] = None
    # None = inherit the site-wide two-factor setting.
    force_two_factor: Optional[bool] = None


class AdminMemberResponse(BaseModel):
    """Full member record as exposed to admins."""

    id: uuid.UUID
    email: str
    first_name: str
    last_name: str
    phone: Optional[str]
    address: Optional[str]
    emergency_contact: Optional[str]
    notifications_enabled: bool
    onboarding_data: Optional[Any]
    is_claimed: bool
    is_active: bool
    member_status: str
    claimed_at: Optional[datetime]
    onboarding_completed_at: Optional[datetime]
    contract_signed_at: Optional[datetime]
    contract_signer_name: Optional[str]
    contract_version: Optional[str]
    activated_at: Optional[datetime]
    service_pricing_overrides: Optional[Any]
    force_two_factor: Optional[bool]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class AdminMemberUpdate(BaseModel):
    """Partial admin update of a member; omitted fields are unchanged."""

    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    notifications_enabled: Optional[bool] = None
    onboarding_data: Optional[Any] = None
    is_active: Optional[bool] = None
    member_status: Optional[str] = None
    service_pricing_overrides: Optional[Any] = None
    force_two_factor: Optional[bool] = None


class AdminMemberToggleAction(BaseModel):
    """Single on/off toggle payload used by member enable/disable endpoints."""

    enabled: bool
|
||||
|
||||
|
||||
class AdminBookingResponse(BaseModel):
    """A booking as listed in the admin UI, denormalised with member details."""

    id: uuid.UUID
    member_id: uuid.UUID
    service_type: str
    requested_date: Optional[datetime]
    status: str
    notes: Optional[str]
    admin_notes: Optional[str]
    created_at: datetime
    # Joined fields
    member_first_name: Optional[str] = None
    member_last_name: Optional[str] = None
    member_email: Optional[str] = None
    member_dog_name: Optional[str] = None
    member_dog_breed: Optional[str] = None

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class AdminBookingCreate(BaseModel):
    """Admin-created booking; defaults straight to 'confirmed'."""

    member_id: uuid.UUID
    service_type: str
    requested_date: Optional[datetime] = None
    status: str = "confirmed"
    notes: Optional[str] = None
    admin_notes: Optional[str] = None


class AdminBookingUpdate(BaseModel):
    """Partial admin update of a booking; omitted fields are unchanged."""

    requested_date: Optional[datetime] = None
    status: Optional[str] = None  # pending | confirmed | cancelled | completed
    notes: Optional[str] = None
    admin_notes: Optional[str] = None
|
||||
|
||||
|
||||
# ── Admin: Record Walk ─────────────────────────────────────────────────────────

class AdminRecordWalk(BaseModel):
    """Admin payload for recording a completed walk against a member."""

    member_id: uuid.UUID
    walked_at: datetime
    service_type: str
    duration_minutes: int = 60
    notes: Optional[str] = None


# ── Admin: Send Message ────────────────────────────────────────────────────────

class AdminSendMessage(BaseModel):
    """Admin payload for sending a message to a member."""

    member_id: uuid.UUID
    subject: str
    body: str
|
||||
|
||||
|
||||
class AdminNotificationSettingsResponse(BaseModel):
    """Current notification automation toggles."""

    automatic_member_notifications_enabled: bool
    nz_public_holiday_notifications_enabled: bool
    invoice_reminder_notifications_enabled: bool
    # Presumably 0/1-based weekday index for invoice reminders — confirm
    # against the scheduler before documenting a range.
    invoice_day_of_week: int


class AdminNotificationSettingsUpdate(BaseModel):
    """Partial update of the notification toggles; omitted fields unchanged."""

    automatic_member_notifications_enabled: Optional[bool] = None
    nz_public_holiday_notifications_enabled: Optional[bool] = None
    invoice_reminder_notifications_enabled: Optional[bool] = None
    invoice_day_of_week: Optional[int] = None


class AdminNotificationRunResponse(BaseModel):
    """Result of a manual/scheduled notification run."""

    automatic_member_notifications_enabled: bool
    public_holiday_messages_sent: int
    invoice_reminders_sent: int


class AdminNotificationFeedItemResponse(BaseModel):
    """One item in the admin notification feed, with a link target."""

    id: str
    type: str
    title: str
    description: str
    created_at: datetime
    href: str


class AdminNotificationsResponse(BaseModel):
    """Notification feed page plus the current settings."""

    items: list[AdminNotificationFeedItemResponse]
    total: int
    settings: AdminNotificationSettingsResponse
|
||||
|
||||
|
||||
class AdminMessageHistoryResponse(BaseModel):
    """A sent message in the admin history view, denormalised with member info."""

    id: uuid.UUID
    member_id: uuid.UUID
    member_name: str
    member_email: str
    subject: str
    body: str
    sent_by: Optional[str]
    created_at: datetime
    read_at: Optional[datetime]
|
||||
|
||||
|
||||
# ── Contract ───────────────────────────────────────────────────────────────────

class ContractResponse(BaseModel):
    """Contract view for a member: signing state plus onboarding context."""

    onboarding_data: Optional[Any]
    member_name: str
    email: str
    member_status: str
    contract_signed_at: Optional[datetime]
    contract_signer_name: Optional[str]
    contract_version: Optional[str]
    activated_at: Optional[datetime]
    joined_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}
|
||||
@@ -0,0 +1,36 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class PageBase(BaseModel):
    """Shared CMS page fields."""

    title: str
    slug: str
    body: str = ""
    meta_title: Optional[str] = None
    meta_description: Optional[str] = None
    og_image_url: Optional[str] = None
    published: bool = False


class PageCreate(PageBase):
    """Payload for creating a page (same fields as PageBase)."""

    pass


class PageUpdate(BaseModel):
    """Partial page update; omitted fields are left unchanged."""

    title: Optional[str] = None
    slug: Optional[str] = None
    body: Optional[str] = None
    meta_title: Optional[str] = None
    meta_description: Optional[str] = None
    og_image_url: Optional[str] = None
    published: Optional[bool] = None


class PageResponse(PageBase):
    """Stored page including identifiers and timestamps."""

    id: uuid.UUID
    created_at: datetime
    updated_at: datetime

    # Allow construction directly from ORM rows.
    model_config = ConfigDict(from_attributes=True)
|
||||
@@ -0,0 +1,46 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class PostBase(BaseModel):
|
||||
title: str
|
||||
slug: str
|
||||
excerpt: Optional[str] = None
|
||||
body: str = ""
|
||||
author: Optional[str] = None
|
||||
featured_image_url: Optional[str] = None
|
||||
tags: List[str] = []
|
||||
published: bool = False
|
||||
|
||||
|
||||
class PostCreate(PostBase):
|
||||
pass
|
||||
|
||||
|
||||
class PostUpdate(BaseModel):
|
||||
title: Optional[str] = None
|
||||
slug: Optional[str] = None
|
||||
excerpt: Optional[str] = None
|
||||
body: Optional[str] = None
|
||||
author: Optional[str] = None
|
||||
featured_image_url: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
published: Optional[bool] = None
|
||||
|
||||
|
||||
class PostResponse(PostBase):
    """Post as returned by the API, including database-generated fields."""

    id: uuid.UUID
    created_at: datetime
    updated_at: datetime

    # Allow construction straight from ORM row attributes.
    model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
|
||||
class PaginatedPostsResponse(BaseModel):
    """One page of posts plus the pagination metadata clients need."""

    items: list[PostResponse]
    total: int        # total matching posts across all pages
    page: int         # current page number
    per_page: int     # page size used for this query
    total_pages: int  # total number of pages at this page size
|
||||
@@ -0,0 +1,91 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Optional, Dict, Any
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
from app.services.pricing import default_service_pricing
|
||||
|
||||
|
||||
class SiteSettingsBase(BaseModel):
    """Site-wide settings: branding, notification toggles, and feature flags."""

    # Branding / presentation
    site_name: str = ""
    tagline: str | None = None
    logo_url: str | None = None
    footer_text: str | None = None
    social_links: dict[str, Any] = {}  # Pydantic copies mutable defaults per instance

    # Notification toggles
    automatic_member_notifications_enabled: bool = True
    nz_public_holiday_notifications_enabled: bool = True
    invoice_reminder_notifications_enabled: bool = True
    invoice_day_of_week: int = 1  # presumably ISO weekday (1 = Monday) — confirm with scheduler

    # Feature flags (mirrored in FeatureSettingsBase)
    bookings_enabled: bool = True
    walks_enabled: bool = True
    messages_enabled: bool = True
    two_factor_enabled: bool = True
    audit_history_enabled: bool = True
    experiments_enabled: bool = True
|
||||
|
||||
|
||||
class SiteSettingsUpdate(BaseModel):
    """Partial-update payload for site settings; only provided fields change."""

    site_name: str | None = None
    tagline: str | None = None
    logo_url: str | None = None
    footer_text: str | None = None
    social_links: dict[str, Any] | None = None
    automatic_member_notifications_enabled: bool | None = None
    nz_public_holiday_notifications_enabled: bool | None = None
    invoice_reminder_notifications_enabled: bool | None = None
    invoice_day_of_week: int | None = None
    bookings_enabled: bool | None = None
    walks_enabled: bool | None = None
    messages_enabled: bool | None = None
    two_factor_enabled: bool | None = None
    audit_history_enabled: bool | None = None
    experiments_enabled: bool | None = None
|
||||
|
||||
|
||||
class FeatureSettingsBase(BaseModel):
    """Feature-flag subset of the site settings; all flags default to on."""

    bookings_enabled: bool = True
    walks_enabled: bool = True
    messages_enabled: bool = True
    two_factor_enabled: bool = True
    audit_history_enabled: bool = True
    experiments_enabled: bool = True
|
||||
|
||||
|
||||
class FeatureSettingsUpdate(BaseModel):
    """Partial-update payload for feature flags; only provided flags change."""

    bookings_enabled: bool | None = None
    walks_enabled: bool | None = None
    messages_enabled: bool | None = None
    two_factor_enabled: bool | None = None
    audit_history_enabled: bool | None = None
    experiments_enabled: bool | None = None
|
||||
|
||||
|
||||
class FeatureSettingsResponse(FeatureSettingsBase):
    """Feature flags as returned by the API; same shape as the base model."""
|
||||
|
||||
|
||||
class ServicePricingSettingsResponse(BaseModel):
    """Service pricing table; defaults to the app's built-in pricing."""

    # default_factory avoids sharing one dict across model instances.
    service_pricing: Dict[str, Any] = Field(default_factory=default_service_pricing)
|
||||
|
||||
|
||||
class ServicePricingSettingsUpdate(BaseModel):
    """Replacement payload for the service pricing table (required, no default)."""

    service_pricing: dict[str, Any]
|
||||
|
||||
|
||||
class PlannerWeatherDay(BaseModel):
    """One day of planner weather data."""

    code: int  # presumably a provider weather-condition code — confirm with weather source
    max: int   # presumably daily maximum temperature — confirm with weather source
    min: int   # presumably daily minimum temperature — confirm with weather source
|
||||
|
||||
|
||||
class PlannerWeatherResponse(BaseModel):
    """Planner weather payload: per-day data keyed by date string, plus fetch time."""

    fetched_at: datetime
    weather: dict[str, PlannerWeatherDay]
|
||||
|
||||
|
||||
class SiteSettingsResponse(SiteSettingsBase):
    """Site settings as returned by the API, including database-generated fields."""

    id: uuid.UUID
    created_at: datetime
    updated_at: datetime

    # Allow construction straight from ORM row attributes.
    model_config = ConfigDict(from_attributes=True)
|
||||
@@ -0,0 +1,393 @@
|
||||
from datetime import date, timedelta
|
||||
|
||||
from sqlalchemy import Date, case, cast, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.analytics import AnalyticsEvent
|
||||
from app.models.member import Booking, Member
|
||||
from app.schemas.analytics import EventCreate
|
||||
|
||||
|
||||
async def record_event(
    db: AsyncSession,
    data: EventCreate,
    ip_hash: str | None,
    ip_partial: str | None = None,
    user_agent: str | None = None,
    browser: str | None = None,
    os_name: str | None = None,
    country: str | None = None,
    city: str | None = None,
) -> AnalyticsEvent:
    """Persist a single analytics event and return the refreshed row."""
    # Request/client attributes gathered outside the payload itself.
    client_info = dict(
        ip_hash=ip_hash,
        ip_partial=ip_partial,
        user_agent=user_agent,
        browser=browser,
        os_name=os_name,
        country=country,
        city=city,
    )
    row = AnalyticsEvent(
        event_type=data.event_type,
        page=data.page,
        element=data.element,
        metadata_=data.metadata,
        session_id=data.session_id,
        **client_info,
    )
    db.add(row)
    await db.commit()
    # Refresh so server-side defaults (id, created_at) are populated.
    await db.refresh(row)
    return row
|
||||
|
||||
|
||||
async def get_summary(db: AsyncSession) -> dict:
    """Return all summary data needed for AnalyticsSummary.

    Gathers, in one pass over the events table:
      - today/yesterday/all-time event totals and today's page views,
      - unique session counts (today and all time),
      - top-N breakdowns: event types, pages, clicked elements,
        page-to-page journeys, browsers, OS names, countries,
      - a 7-day daily event series (zero-filled for missing days),
      - the 30 most recent events.

    Args:
        db: Open async session; issues SELECTs only, never commits.

    Returns:
        dict whose keys match the AnalyticsSummary schema fields.
    """
    today = date.today()
    yesterday = today - timedelta(days=1)
    week_ago = today - timedelta(days=6)  # inclusive 7-day window

    # Compare on calendar date, not full timestamp.
    date_col = cast(AnalyticsEvent.created_at, Date)

    # Total events today
    result = await db.execute(
        select(func.count()).select_from(AnalyticsEvent).where(date_col == today)
    )
    total_events_today = result.scalar_one()

    # Total events yesterday
    result = await db.execute(
        select(func.count()).select_from(AnalyticsEvent).where(date_col == yesterday)
    )
    total_events_yesterday = result.scalar_one()

    # Page views today
    result = await db.execute(
        select(func.count())
        .select_from(AnalyticsEvent)
        .where(date_col == today)
        .where(AnalyticsEvent.event_type == "page_view")
    )
    page_views_today = result.scalar_one()

    # Unique sessions today
    result = await db.execute(
        select(func.count(AnalyticsEvent.session_id.distinct()))
        .select_from(AnalyticsEvent)
        .where(date_col == today)
    )
    unique_sessions_today = result.scalar_one()

    # Unique sessions total
    result = await db.execute(
        select(func.count(AnalyticsEvent.session_id.distinct())).select_from(AnalyticsEvent)
    )
    unique_sessions_total = result.scalar_one()

    # Total events all time
    result = await db.execute(
        select(func.count()).select_from(AnalyticsEvent)
    )
    total_events_all_time = result.scalar_one()

    # Events by type (top 10, all time)
    result = await db.execute(
        select(AnalyticsEvent.event_type, func.count().label("cnt"))
        .group_by(AnalyticsEvent.event_type)
        .order_by(func.count().desc())
        .limit(10)
    )
    events_by_type = [{"label": r.event_type, "count": r.cnt} for r in result.all()]

    # Top pages (page_view events, top 10)
    result = await db.execute(
        select(AnalyticsEvent.page, func.count().label("cnt"))
        .where(AnalyticsEvent.event_type == "page_view")
        .group_by(AnalyticsEvent.page)
        .order_by(func.count().desc())
        .limit(10)
    )
    top_pages = [{"label": r.page, "count": r.cnt} for r in result.all()]

    # Top elements (non page_view, top 10)
    result = await db.execute(
        select(AnalyticsEvent.element, func.count().label("cnt"))
        .where(AnalyticsEvent.event_type != "page_view")
        .where(AnalyticsEvent.element.isnot(None))
        .group_by(AnalyticsEvent.element)
        .order_by(func.count().desc())
        .limit(10)
    )
    top_elements = [{"label": r.element, "count": r.cnt} for r in result.all()]

    # Top journeys (page-to-page flows derived from page_view events per session).
    # Done in Python because it needs ordered, per-session adjacent-row pairing.
    # The id tie-break keeps ordering deterministic for same-timestamp rows.
    result = await db.execute(
        select(
            AnalyticsEvent.session_id,
            AnalyticsEvent.page,
        )
        .where(AnalyticsEvent.event_type == "page_view")
        .order_by(AnalyticsEvent.session_id, AnalyticsEvent.created_at, AnalyticsEvent.id)
    )

    journey_counts: dict[str, int] = {}
    current_session = None
    previous_page = None

    for row in result.all():
        # New session: reset the chain so journeys never cross sessions.
        if row.session_id != current_session:
            current_session = row.session_id
            previous_page = None

        # Skip consecutive repeat views of the same page (e.g. reloads).
        if row.page == previous_page:
            continue

        if previous_page is not None:
            journey = f"{previous_page} -> {row.page}"
            journey_counts[journey] = journey_counts.get(journey, 0) + 1

        previous_page = row.page

    # Sort by count descending, then label ascending for a stable top-10.
    top_journeys = [
        {"label": label, "count": count}
        for label, count in sorted(
            journey_counts.items(),
            key=lambda item: (-item[1], item[0]),
        )[:10]
    ]

    # Top browsers
    result = await db.execute(
        select(AnalyticsEvent.browser, func.count().label("cnt"))
        .where(AnalyticsEvent.browser.isnot(None))
        .group_by(AnalyticsEvent.browser)
        .order_by(func.count().desc())
        .limit(8)
    )
    top_browsers = [{"label": r.browser, "count": r.cnt} for r in result.all()]

    # Top OS
    result = await db.execute(
        select(AnalyticsEvent.os_name, func.count().label("cnt"))
        .where(AnalyticsEvent.os_name.isnot(None))
        .group_by(AnalyticsEvent.os_name)
        .order_by(func.count().desc())
        .limit(8)
    )
    top_os = [{"label": r.os_name, "count": r.cnt} for r in result.all()]

    # Top countries
    result = await db.execute(
        select(AnalyticsEvent.country, func.count().label("cnt"))
        .where(AnalyticsEvent.country.isnot(None))
        .group_by(AnalyticsEvent.country)
        .order_by(func.count().desc())
        .limit(8)
    )
    top_countries = [{"label": r.country, "count": r.cnt} for r in result.all()]

    # Last 7 days counts (zero-filled below for days with no events)
    result = await db.execute(
        select(date_col.label("day"), func.count().label("cnt"))
        .where(date_col >= week_ago)
        .group_by(date_col)
        .order_by(date_col)
    )
    days = {str(r.day): r.cnt for r in result.all()}

    last_7 = []
    for i in range(6, -1, -1):  # oldest day first
        d = str(today - timedelta(days=i))
        last_7.append({"date": d, "count": days.get(d, 0)})

    # Recent events (last 30)
    result = await db.execute(
        select(AnalyticsEvent)
        .order_by(AnalyticsEvent.created_at.desc())
        .limit(30)
    )
    recent = list(result.scalars().all())

    return {
        "total_events_today": total_events_today,
        "total_events_yesterday": total_events_yesterday,
        "page_views_today": page_views_today,
        "unique_sessions_today": unique_sessions_today,
        "unique_sessions_total": unique_sessions_total,
        "total_events_all_time": total_events_all_time,
        "events_by_type": events_by_type,
        "top_pages": top_pages,
        "top_elements": top_elements,
        "top_journeys": top_journeys,
        "top_browsers": top_browsers,
        "top_os": top_os,
        "top_countries": top_countries,
        "events_last_7_days": last_7,
        "recent_events": recent,
    }
|
||||
|
||||
|
||||
async def get_booking_operations_summary(db: AsyncSession) -> dict:
    """Return booking operations reporting for the admin Reporting page.

    Aggregates, over the bookings table:
      - active booking total (pending/confirmed/completed),
      - forward load: pending/confirmed bookings in the next 14 days,
        per day with an AM/PM split,
      - bookings created and cancellations in the last 30 days (daily series),
      - high-volume customers (3+ upcoming bookings) and the top 8 by count.

    Args:
        db: Open async session; issues SELECTs only, never commits.

    Returns:
        dict whose keys match the booking-operations reporting schema.
    """
    today = date.today()
    activity_start = today - timedelta(days=29)   # inclusive 30-day window
    forward_load_end = today + timedelta(days=13) # inclusive 14-day window

    # Compare on calendar date, not full timestamp.
    created_date_col = cast(Booking.created_at, Date)
    updated_date_col = cast(Booking.updated_at, Date)
    requested_date_col = cast(Booking.requested_date, Date)

    active_statuses = ("pending", "confirmed", "completed")
    forward_statuses = ("pending", "confirmed")  # only these count toward future load

    active_total_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(Booking.status.in_(active_statuses))
    )
    active_bookings_total = active_total_result.scalar_one()

    forward_load_total_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .where(requested_date_col <= forward_load_end)
    )
    forward_load_total = forward_load_total_result.scalar_one()

    booked_last_30_days_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(created_date_col >= activity_start)
        .where(created_date_col <= today)
    )
    booked_last_30_days = booked_last_30_days_result.scalar_one()

    # Cancellation date is approximated by updated_at (when status last changed).
    cancellations_last_30_days_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(Booking.status == "cancelled")
        .where(updated_date_col >= activity_start)
        .where(updated_date_col <= today)
    )
    cancellations_last_30_days = cancellations_last_30_days_result.scalar_one()

    # One row per member with 3+ upcoming bookings; we only need the row count.
    high_volume_result = await db.execute(
        select(func.count().label("booking_count"))
        .select_from(Booking)
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .group_by(Booking.member_id)
        .having(func.count() >= 3)
    )
    high_volume_customer_count = len(high_volume_result.all())

    # NOTE(review): the AM/PM split extracts the hour from requested_date —
    # this assumes the column carries a time-of-day component; confirm its type.
    forward_load_result = await db.execute(
        select(
            requested_date_col.label("day"),
            func.count().label("total"),
            func.sum(
                case(
                    (func.extract("hour", Booking.requested_date) < 12, 1),
                    else_=0,
                )
            ).label("am"),
            func.sum(
                case(
                    (func.extract("hour", Booking.requested_date) >= 12, 1),
                    else_=0,
                )
            ).label("pm"),
        )
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .where(requested_date_col <= forward_load_end)
        .group_by(requested_date_col)
        .order_by(requested_date_col)
    )
    forward_load_by_day = {
        str(row.day): {
            "total": int(row.total or 0),
            "am": int(row.am or 0),
            "pm": int(row.pm or 0),
        }
        for row in forward_load_result.all()
    }

    # Zero-fill the next 14 days so the chart has a point for every day.
    forward_load_next_14_days = []
    for offset in range(14):
        current_day = str(today + timedelta(days=offset))
        values = forward_load_by_day.get(current_day, {"total": 0, "am": 0, "pm": 0})
        forward_load_next_14_days.append({
            "date": current_day,
            "total": values["total"],
            "am": values["am"],
            "pm": values["pm"],
        })

    booked_activity_result = await db.execute(
        select(created_date_col.label("day"), func.count().label("count"))
        .where(created_date_col >= activity_start)
        .where(created_date_col <= today)
        .group_by(created_date_col)
        .order_by(created_date_col)
    )
    booked_by_day = {str(row.day): int(row.count or 0) for row in booked_activity_result.all()}

    cancellation_activity_result = await db.execute(
        select(updated_date_col.label("day"), func.count().label("count"))
        .where(Booking.status == "cancelled")
        .where(updated_date_col >= activity_start)
        .where(updated_date_col <= today)
        .group_by(updated_date_col)
        .order_by(updated_date_col)
    )
    cancellations_by_day = {
        str(row.day): int(row.count or 0)
        for row in cancellation_activity_result.all()
    }

    # Zero-fill the 30-day activity window, oldest day first.
    activity_last_30_days = []
    for offset in range(30):
        current_day = str(activity_start + timedelta(days=offset))
        activity_last_30_days.append({
            "date": current_day,
            "booked": booked_by_day.get(current_day, 0),
            "cancellations": cancellations_by_day.get(current_day, 0),
        })

    volume_result = await db.execute(
        select(
            Member.first_name,
            Member.last_name,
            func.count(Booking.id).label("count"),
        )
        .join(Member, Booking.member_id == Member.id)
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .group_by(Member.id, Member.first_name, Member.last_name)
        .order_by(func.count(Booking.id).desc(), Member.first_name.asc(), Member.last_name.asc())
        .limit(8)
    )
    top_high_volume_customers = [
        {
            # Fall back to "Client" when both name parts are empty/None.
            "label": " ".join(part for part in [row.first_name, row.last_name] if part).strip() or "Client",
            "count": int(row.count or 0),
        }
        for row in volume_result.all()
    ]

    return {
        "active_bookings_total": int(active_bookings_total or 0),
        "forward_load_total": int(forward_load_total or 0),
        "booked_last_30_days": int(booked_last_30_days or 0),
        "cancellations_last_30_days": int(cancellations_last_30_days or 0),
        "high_volume_customer_count": int(high_volume_customer_count or 0),
        "forward_load_next_14_days": forward_load_next_14_days,
        "activity_last_30_days": activity_last_30_days,
        "top_high_volume_customers": top_high_volume_customers,
    }
|
||||
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Audit logging service.
|
||||
|
||||
Call `log_audit(db, ...)` from within any request handler that already holds
|
||||
an open AsyncSession. The entry is added to the session — it will be
|
||||
committed with the surrounding transaction.
|
||||
|
||||
For error logging outside a request session (e.g. exception middleware), open
|
||||
a fresh session via `AsyncSessionLocal`, call `log_audit`, then `commit`.
|
||||
"""
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.audit import AuditLog
|
||||
from app.services.settings import get_feature_settings_snapshot
|
||||
|
||||
|
||||
async def log_audit(
    db: AsyncSession,
    *,
    action_type: str,
    area: str,
    description: str,
    member_id: Optional[uuid.UUID] = None,
    member_email: Optional[str] = None,
    status: str = "success",
    booking_id: Optional[uuid.UUID] = None,
    error_message: Optional[str] = None,
    error_detail: Optional[str] = None,
    ip_address: Optional[str] = None,
    user_agent: Optional[str] = None,
    extra: Optional[dict] = None,
) -> None:
    """Add an audit-log entry to *db*'s pending transaction.

    No-op when the audit-history feature flag is disabled. The entry is only
    added to the session, never committed here — it is persisted with the
    surrounding transaction by the caller.
    """
    # Respect the site-wide audit-history toggle before recording anything.
    features = await get_feature_settings_snapshot(db)
    if not features.audit_history_enabled:
        return

    db.add(
        AuditLog(
            timestamp=datetime.now(timezone.utc),
            member_id=member_id,
            member_email=member_email,
            action_type=action_type,
            area=area,
            description=description,
            status=status,
            booking_id=booking_id,
            error_message=error_message,
            error_detail=error_detail,
            ip_address=ip_address,
            user_agent=user_agent,
            extra=extra,
        )
    )
|
||||
@@ -0,0 +1,166 @@
|
||||
"""
|
||||
Email sending service.
|
||||
|
||||
In development (SMTP_HOST unset or EMAIL_BACKEND=console), codes are printed to
|
||||
stdout instead of being sent. In production set:
|
||||
|
||||
SMTP_HOST, SMTP_PORT, SMTP_USER, SMTP_PASSWORD, EMAIL_FROM
|
||||
"""
|
||||
import asyncio
|
||||
import smtplib
|
||||
import ssl
|
||||
import logging
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from functools import partial
|
||||
|
||||
from app.config import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _send_smtp_blocking(to_address: str, subject: str, html_body: str, text_body: str) -> None:
    """Build a multipart (text + HTML) message and send it over SMTP.

    Blocking; intended to run in an executor, never on the event loop.
    """
    # Assemble the message first so the SMTP connection is held only
    # for the handshake and the actual send.
    message = MIMEMultipart("alternative")
    message["Subject"] = subject
    message["From"] = settings.EMAIL_FROM
    message["To"] = to_address
    message.attach(MIMEText(text_body, "plain"))
    message.attach(MIMEText(html_body, "html"))

    tls_context = ssl.create_default_context()
    with smtplib.SMTP(settings.SMTP_HOST, settings.SMTP_PORT) as server:
        if settings.SMTP_USE_TLS:
            server.starttls(context=tls_context)
        # Authenticate only when both credentials are configured.
        if settings.SMTP_USER and settings.SMTP_PASSWORD:
            server.login(settings.SMTP_USER, settings.SMTP_PASSWORD)
        server.sendmail(settings.EMAIL_FROM, to_address, message.as_string())
|
||||
|
||||
|
||||
async def send_email(to_address: str, subject: str, html_body: str, text_body: str) -> None:
    """Send an email, or log/print it when using the console backend.

    The console path is taken when EMAIL_BACKEND is "console" or SMTP_HOST is
    unset (development); the message text is logged and printed instead of
    being sent. Otherwise the blocking SMTP send runs in the default executor
    so the event loop is never blocked.
    """
    if settings.EMAIL_BACKEND == "console" or not settings.SMTP_HOST:
        logger.info(
            "\n%s\nTO: %s\nSUBJECT: %s\n%s\n%s",
            "=" * 60,
            to_address,
            subject,
            text_body,
            "=" * 60,
        )
        print(f"\n{'='*60}\nEMAIL TO: {to_address}\nSUBJECT: {subject}\n{text_body}\n{'='*60}\n")
        return

    # get_running_loop() is the supported API inside a coroutine;
    # get_event_loop() is deprecated in this context since Python 3.10.
    loop = asyncio.get_running_loop()
    fn = partial(_send_smtp_blocking, to_address, subject, html_body, text_body)
    await loop.run_in_executor(None, fn)
|
||||
|
||||
|
||||
# ── Template helpers ───────────────────────────────────────────────────────────
|
||||
|
||||
def _base_html(content: str) -> str:
    """Wrap *content* in the branded Goodwalk HTML email shell.

    The shell provides the header, inline CSS, and footer; *content* is
    inserted verbatim between header and footer.
    """
    # str.format uses the same {{ }} escaping as the original f-string,
    # so the emitted HTML is unchanged.
    shell = """<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body {{ font-family: 'Readex Pro', Arial, sans-serif; background: #FBFBFB; margin: 0; padding: 0; }}
.container {{ max-width: 560px; margin: 40px auto; background: #fff; border-radius: 16px;
overflow: hidden; box-shadow: 0 4px 24px rgba(0,40,66,.10); }}
.header {{ background: #002842; padding: 32px 40px; text-align: center; }}
.header h1 {{ color: #FFD100; font-family: 'Fredoka One', Arial, sans-serif;
font-size: 28px; margin: 0; letter-spacing: .5px; }}
.header p {{ color: #E5EEFF; margin: 6px 0 0; font-size: 14px; }}
.body {{ padding: 36px 40px; color: #2E3031; }}
.body p {{ line-height: 1.6; margin: 0 0 16px; }}
.code-box {{ background: #E5EEFF; border-radius: 12px; padding: 20px;
text-align: center; margin: 24px 0; }}
.code {{ font-size: 36px; font-weight: 700; letter-spacing: 10px; color: #002842;
font-family: 'Fredoka One', monospace; }}
.footer {{ background: #F4F6FB; padding: 20px 40px; text-align: center;
font-size: 12px; color: #888; border-top: 1px solid #E5EEFF; }}
.expiry {{ color: #888; font-size: 13px; }}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>🐾 Goodwalk</h1>
<p>Auckland's favourite dog walking service</p>
</div>
{content}
<div class="footer">
<p>Goodwalk — Auckland, New Zealand<br>
<a href="mailto:info@goodwalk.co.nz" style="color:#FFD100;">info@goodwalk.co.nz</a>
</p>
<p>If you didn't request this email, you can safely ignore it.</p>
</div>
</div>
</body>
</html>"""
    return shell.format(content=content)
|
||||
|
||||
|
||||
async def send_claim_code(to_address: str, first_name: str, code: str) -> None:
    """Email *code* to a new member so they can claim their account.

    The code's 15-minute, single-use validity is stated in the message body;
    enforcement happens elsewhere.
    """
    subject = "Claim your Goodwalk Members Account"

    # Plain-text alternative for clients that don't render HTML.
    text = (
        f"Hi {first_name},\n\n"
        f"Your Goodwalk account claim code is: {code}\n\n"
        "This code expires in 15 minutes.\n\n"
        "If you didn't request this, please ignore this email."
    )

    html = _base_html(f"""
<div class="body">
<p>Hi {first_name}!</p>
<p>Welcome to the Goodwalk Members Area. Use the code below to claim your account.
It expires in <strong>15 minutes</strong>.</p>
<div class="code-box">
<div class="code">{code}</div>
</div>
<p class="expiry">This code is valid for 15 minutes and can only be used once.</p>
<p>If you didn't request this, please contact us at
<a href="mailto:info@goodwalk.co.nz">info@goodwalk.co.nz</a>.</p>
</div>""")

    await send_email(to_address, subject, html, text)
|
||||
|
||||
|
||||
async def send_login_2fa(to_address: str, first_name: str, code: str) -> None:
    """Email a one-time login (2FA) *code* to a member.

    The code's 10-minute, single-use validity is stated in the message body;
    enforcement happens elsewhere.
    """
    subject = "Your Goodwalk login code"

    # Plain-text alternative for clients that don't render HTML.
    text = (
        f"Hi {first_name},\n\n"
        f"Your Goodwalk login code is: {code}\n\n"
        "This code expires in 10 minutes.\n\n"
        "If you didn't request this, please contact us immediately."
    )

    html = _base_html(f"""
<div class="body">
<p>Hi {first_name}!</p>
<p>Here's your one-time login code for the Goodwalk Members Area.
It expires in <strong>10 minutes</strong>.</p>
<div class="code-box">
<div class="code">{code}</div>
</div>
<p class="expiry">This code is valid for 10 minutes and can only be used once.</p>
<p>If you didn't try to log in, please contact us immediately at
<a href="mailto:info@goodwalk.co.nz">info@goodwalk.co.nz</a>.</p>
</div>""")

    await send_email(to_address, subject, html, text)
|
||||
|
||||
|
||||
async def send_onboarding_invite(to_address: str, first_name: str, magic_url: str) -> None:
    """Email an onboarding invitation containing the single-use *magic_url*.

    The link's 7-day, single-use validity is stated in the message body;
    enforcement happens elsewhere.
    """
    subject = "You're invited to complete your Goodwalk onboarding"

    # Plain-text alternative for clients that don't render HTML.
    text = (
        f"Hi {first_name},\n\n"
        "We've opened your Goodwalk onboarding invitation.\n\n"
        f"Click this link to get started (valid for 7 days):\n{magic_url}\n\n"
        "Once your onboarding is complete and your contract is signed, we'll activate your members account.\n\n"
        "Questions? Email info@goodwalk.co.nz"
    )

    html = _base_html(f"""
<div class="body">
<p>Hi {first_name}!</p>
<p>Thanks for getting in touch with Goodwalk. We've opened your onboarding invitation so you can complete your details and sign your service agreement.</p>
<p><a href="{magic_url}" style="display:inline-block;padding:12px 18px;border-radius:12px;background:#FFD100;color:#002842;text-decoration:none;font-weight:700;">Start onboarding →</a></p>
<p class="expiry">This link is valid for 7 days and can only be used once.</p>
<p>Once your onboarding is complete and your contract is signed, we'll activate your members account.</p>
<p>If you have any questions, reach us at <a href="mailto:info@goodwalk.co.nz">info@goodwalk.co.nz</a>.</p>
</div>""")

    await send_email(to_address, subject, html, text)
|
||||
@@ -0,0 +1,251 @@
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import case, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.experiments.registry import EXPERIMENT_REGISTRY
|
||||
from app.models.experiment import Experiment, ExperimentEvent, ExperimentVariant
|
||||
from app.schemas.experiments import (
|
||||
ExperimentConversionCreate,
|
||||
ExperimentDefinitionResponse,
|
||||
ExperimentDefinitionUpdate,
|
||||
ExperimentEventCreate,
|
||||
ExperimentImpressionCreate,
|
||||
ExperimentResult,
|
||||
ExperimentVariantResult,
|
||||
)
|
||||
|
||||
|
||||
def experiment_exists(experiment_key: str, variant_key: str) -> bool:
    """Return True when the registry defines *experiment_key* with *variant_key*."""
    definition = EXPERIMENT_REGISTRY.get(experiment_key)
    if not definition:
        return False
    known_variants = {variant["variant_key"] for variant in definition["variants"]}
    return variant_key in known_variants
|
||||
|
||||
|
||||
async def sync_experiment_registry(db: AsyncSession) -> None:
    """Upsert the in-code EXPERIMENT_REGISTRY into the experiments tables.

    For each registry definition: insert the Experiment row if missing, then
    insert any missing variants and update label/allocation/is_control on
    existing ones. Flushes but does not commit — the caller owns the
    transaction.

    Notes (grounded in the code below):
      - Experiment-level fields (name, enabled, routes, ...) are NOT updated
        for experiments that already exist; only variant fields are refreshed.
      - Experiments/variants removed from the registry are left in the DB.
    """
    result = await db.execute(
        select(Experiment).options(selectinload(Experiment.variants))
    )
    # Map existing rows by key for O(1) lookup while iterating the registry.
    existing = {experiment.experiment_key: experiment for experiment in result.scalars().all()}

    for definition in EXPERIMENT_REGISTRY.values():
        experiment = existing.get(definition["experiment_key"])
        existing_variants: dict[str, ExperimentVariant] = {}

        if experiment is None:
            experiment = Experiment(
                experiment_key=definition["experiment_key"],
                cookie_name=definition["cookie_name"],
                name=definition["name"],
                description=definition.get("description"),
                enabled=definition["enabled"],
                eligible_routes=definition["eligible_routes"],
            )
            db.add(experiment)
            # Flush so experiment.id is assigned before creating variants.
            await db.flush()
        else:
            existing_variants = {variant.variant_key: variant for variant in experiment.variants}

        for variant_definition in definition["variants"]:
            variant = existing_variants.get(variant_definition["variant_key"])
            if variant is None:
                # New variant for this experiment: insert it.
                db.add(
                    ExperimentVariant(
                        experiment_id=experiment.id,
                        variant_key=variant_definition["variant_key"],
                        label=variant_definition["label"],
                        allocation=variant_definition["allocation"],
                        is_control=variant_definition["is_control"],
                    )
                )
                continue

            # Existing variant: refresh the mutable fields from the registry.
            variant.label = variant_definition["label"]
            variant.allocation = variant_definition["allocation"]
            variant.is_control = variant_definition["is_control"]

    await db.flush()
|
||||
|
||||
|
||||
async def list_experiment_definitions(db: AsyncSession) -> list[ExperimentDefinitionResponse]:
    """Load every experiment (with variants) and map it to the API schema."""
    result = await db.execute(
        select(Experiment).options(selectinload(Experiment.variants)).order_by(Experiment.experiment_key)
    )

    definitions: list[ExperimentDefinitionResponse] = []
    for experiment in result.scalars().all():
        variant_payloads = [
            {
                "variant_key": variant.variant_key,
                "label": variant.label,
                "allocation": variant.allocation,
                "is_control": variant.is_control,
            }
            for variant in experiment.variants
        ]
        definitions.append(
            ExperimentDefinitionResponse(
                experiment_key=experiment.experiment_key,
                cookie_name=experiment.cookie_name,
                name=experiment.name,
                description=experiment.description,
                enabled=experiment.enabled,
                eligible_routes=experiment.eligible_routes,
                variants=variant_payloads,
            )
        )
    return definitions
|
||||
|
||||
|
||||
async def record_experiment_event(
    db: AsyncSession,
    payload: ExperimentImpressionCreate | ExperimentEventCreate | ExperimentConversionCreate,
) -> ExperimentEvent:
    """Persist one experiment event (impression/event/conversion) and return it.

    Flushes but does not commit — the caller owns the transaction.
    """
    # Treat naive timestamps as UTC, then store a naive-UTC datetime.
    when = payload.timestamp
    if when.tzinfo is None:
        when = when.replace(tzinfo=timezone.utc)
    stored_at = when.astimezone(timezone.utc).replace(tzinfo=None)

    event = ExperimentEvent(
        experiment_key=payload.experiment_key,
        variant_key=payload.variant_key,
        session_id=payload.session_id,
        user_id=payload.user_id,
        path=payload.path,
        event_type=payload.event_name,
        # Only conversion payloads carry a value; others fall back to None.
        conversion_value=getattr(payload, "conversion_value", None),
        metadata_=payload.metadata,
        created_at=stored_at,
    )
    db.add(event)
    await db.flush()
    await db.refresh(event)
    return event
|
||||
|
||||
|
||||
async def get_experiment_results(db: AsyncSession, experiment_key: str | None = None) -> list[ExperimentResult]:
    """Aggregate stored events into per-variant results, grouped by experiment.

    Counts impressions, CTA clicks, form starts/submits, conversions, distinct
    sessions, and total conversion value per (experiment_key, variant_key).
    Optionally restricted to a single experiment_key.
    """
    stmt = select(
        ExperimentEvent.experiment_key,
        ExperimentEvent.variant_key,
        # Conditional counts: each case() contributes 1 for matching event types, else 0.
        func.sum(case((ExperimentEvent.event_type == "impression", 1), else_=0)).label("impressions"),
        func.sum(case((ExperimentEvent.event_type == "cta_click", 1), else_=0)).label("cta_clicks"),
        func.sum(case((ExperimentEvent.event_type == "form_start", 1), else_=0)).label("form_starts"),
        func.sum(case((ExperimentEvent.event_type == "form_submit", 1), else_=0)).label("form_submits"),
        func.sum(case((ExperimentEvent.event_type == "conversion", 1), else_=0)).label("conversions"),
        func.count(func.distinct(ExperimentEvent.session_id)).label("unique_sessions"),
        # coalesce so groups with no conversion values sum to 0, not NULL.
        func.coalesce(func.sum(ExperimentEvent.conversion_value), Decimal("0")).label("conversion_value_total"),
    ).group_by(ExperimentEvent.experiment_key, ExperimentEvent.variant_key).order_by(
        ExperimentEvent.experiment_key,
        ExperimentEvent.variant_key,
    )

    if experiment_key:
        stmt = stmt.where(ExperimentEvent.experiment_key == experiment_key)

    result = await db.execute(stmt)
    rows = result.all()

    # Bucket the flat per-variant rows under their experiment key.
    grouped: dict[str, list[ExperimentVariantResult]] = {}
    for row in rows:
        impressions = int(row.impressions or 0)
        conversions = int(row.conversions or 0)
        # Conversion rate is conversions per impression; 0.0 when there are no impressions.
        conversion_rate = conversions / impressions if impressions else 0.0

        grouped.setdefault(row.experiment_key, []).append(
            ExperimentVariantResult(
                variant_key=row.variant_key,
                impressions=impressions,
                cta_clicks=int(row.cta_clicks or 0),
                form_starts=int(row.form_starts or 0),
                form_submits=int(row.form_submits or 0),
                conversions=conversions,
                unique_sessions=int(row.unique_sessions or 0),
                conversion_rate=round(conversion_rate, 4),
                conversion_value_total=float(row.conversion_value_total or 0),
            )
        )

    # Single timestamp shared by every result in this batch.
    generated_at = datetime.now(timezone.utc)
    return [
        ExperimentResult(
            experiment_key=key,
            generated_at=generated_at,
            variants=variants,
        )
        for key, variants in grouped.items()
    ]
|
||||
|
||||
|
||||
async def get_experiment_definition(db: AsyncSession, experiment_key: str) -> Experiment | None:
    """Fetch one experiment by key with its variants eagerly loaded, or None."""
    stmt = (
        select(Experiment)
        .options(selectinload(Experiment.variants))
        .where(Experiment.experiment_key == experiment_key)
    )
    return (await db.execute(stmt)).scalars().first()
|
||||
|
||||
|
||||
async def upsert_experiment_definition(
    db: AsyncSession,
    experiment_key: str,
    payload: ExperimentDefinitionUpdate,
) -> Experiment:
    """Create or replace the experiment definition stored under experiment_key.

    Variants are reconciled in place: incoming variants update existing rows by
    variant_key, rows missing from the payload are deleted, and new keys are
    inserted.

    Raises:
        ValueError: if payload.cookie_name is already used by another experiment.
    """
    experiment = await get_experiment_definition(db, experiment_key)

    # Enforce cookie_name uniqueness across experiments before writing anything.
    duplicate_cookie = await db.execute(
        select(Experiment).where(
            Experiment.cookie_name == payload.cookie_name,
            Experiment.experiment_key != experiment_key,
        )
    )
    if duplicate_cookie.scalars().first():
        raise ValueError("cookie_name is already used by another experiment")

    if experiment is None:
        # Insert path: flush immediately so experiment.id exists for the variant rows below.
        experiment = Experiment(
            experiment_key=experiment_key,
            cookie_name=payload.cookie_name,
            name=payload.name,
            description=payload.description,
            enabled=payload.enabled,
            eligible_routes=payload.eligible_routes,
        )
        db.add(experiment)
        await db.flush()
        existing_variants: dict[str, ExperimentVariant] = {}
    else:
        # Update path: overwrite scalar fields and index current variants by key.
        experiment.cookie_name = payload.cookie_name
        experiment.name = payload.name
        experiment.description = payload.description
        experiment.enabled = payload.enabled
        experiment.eligible_routes = payload.eligible_routes
        existing_variants = {variant.variant_key: variant for variant in experiment.variants}

    # Remove variants that are no longer present in the payload.
    incoming_keys = {variant.variant_key for variant in payload.variants}
    for variant in list(existing_variants.values()):
        if variant.variant_key not in incoming_keys:
            await db.delete(variant)

    for variant_payload in payload.variants:
        variant = existing_variants.get(variant_payload.variant_key)
        if variant is None:
            # Brand-new variant for this experiment.
            db.add(
                ExperimentVariant(
                    experiment_id=experiment.id,
                    variant_key=variant_payload.variant_key,
                    label=variant_payload.label,
                    allocation=variant_payload.allocation,
                    is_control=variant_payload.is_control,
                )
            )
            continue

        # Existing variant: update its mutable fields in place.
        variant.label = variant_payload.label
        variant.allocation = variant_payload.allocation
        variant.is_control = variant_payload.is_control

    await db.flush()
    # Reload so callers receive the final state with variants eagerly attached.
    refreshed = await get_experiment_definition(db, experiment_key)
    assert refreshed is not None
    return refreshed
|
||||
@@ -0,0 +1,614 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
from copy import deepcopy
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.database import AsyncSessionLocal
|
||||
from app.models.member import AdminMessage, Booking, Member, MemberNotificationDispatch, Walk
|
||||
from app.models.settings import SiteSettings
|
||||
from app.services.sections import get_section
|
||||
from app.services.settings import get_feature_settings_snapshot, get_settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
NZ_TZ = ZoneInfo("Pacific/Auckland")
|
||||
AUTOMATION_INTERVAL_SECONDS = 3600
|
||||
SERVICE_LABELS = {
|
||||
"pack_walk": "Pack Walk",
|
||||
"1_1_walk": "1-1 Walk",
|
||||
"puppy_visit": "Puppy Visit",
|
||||
}
|
||||
DEFAULT_SENT_BY = "Goodwalk"
|
||||
DEFAULT_INVOICE_DAY = 1
|
||||
MATARIKI_DATES = {
|
||||
2025: date(2025, 6, 20),
|
||||
2026: date(2026, 7, 10),
|
||||
2027: date(2027, 6, 25),
|
||||
2028: date(2028, 7, 14),
|
||||
2029: date(2029, 7, 6),
|
||||
2030: date(2030, 6, 21),
|
||||
2031: date(2031, 7, 11),
|
||||
2032: date(2032, 7, 2),
|
||||
2033: date(2033, 6, 24),
|
||||
2034: date(2034, 7, 7),
|
||||
2035: date(2035, 6, 29),
|
||||
}
|
||||
AUTOMATIC_NOTIFICATION_SECTION_KEY = "notifications.automaticMessages"
|
||||
PUBLIC_HOLIDAY_NOTIFICATION_SECTION_KEY = "notifications.publicHolidays"
|
||||
INVOICE_REMINDER_NOTIFICATION_SECTION_KEY = "notifications.invoiceReminders"
|
||||
TEMPLATE_TOKEN_PATTERN = re.compile(r"{{\s*([a-zA-Z0-9_]+)\s*}}")
|
||||
DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES = {
|
||||
"member_activated": {
|
||||
"subject": "Your Goodwalk members account is now active",
|
||||
"body": "Your onboarding is complete and your full members area is now ready. You can log in to view bookings, messages, walks, and your contract.",
|
||||
},
|
||||
"booking_confirmed": {
|
||||
"subject": "Your Goodwalk booking has been confirmed",
|
||||
"body": "Your {{service_label}} booking for {{requested_date_label}} has been confirmed. If anything changes, we will message you here.",
|
||||
},
|
||||
"booking_rescheduled": {
|
||||
"subject": "Your Goodwalk booking has been rescheduled",
|
||||
"body": "Your {{service_label}} has been moved to {{requested_date_label}}. If you have any questions, please get in touch.",
|
||||
},
|
||||
"booking_cancelled": {
|
||||
"subject": "Your Goodwalk booking has been cancelled",
|
||||
"body": "Your {{service_label}} booking for {{requested_date_label}} has been cancelled. Please contact us if you would like to arrange another time.",
|
||||
},
|
||||
"walk_completed": {
|
||||
"subject": "Your Goodwalk walk is complete",
|
||||
"body": "{{member_first_name}}, your {{service_label}} on {{walked_on_label}} has been marked as complete.{{walk_notes_sentence}}",
|
||||
},
|
||||
}
|
||||
DEFAULT_PUBLIC_HOLIDAY_NOTIFICATION_TEMPLATE = {
|
||||
"subject": "Goodwalk public holiday update: {{holiday_name}}",
|
||||
"body": "Today is {{holiday_name}} in New Zealand. If you were expecting service changes or slower replies today, this is why. We will confirm any booking adjustments directly in your messages.",
|
||||
}
|
||||
DEFAULT_INVOICE_REMINDER_NOTIFICATION_TEMPLATE = {
|
||||
"subject": "Invoice reminder from Goodwalk",
|
||||
"body": "A quick reminder that invoices are scheduled to go out on {{weekday_label}}. This week that falls on {{invoice_date_label}}.",
|
||||
}
|
||||
|
||||
|
||||
@dataclass
class NotificationSettingsSnapshot:
    """Plain snapshot of the notification-related fields read from SiteSettings."""

    # Master switch for all automatic member messages.
    automatic_member_notifications_enabled: bool = True
    # Whether to message members on NZ public holidays.
    nz_public_holiday_notifications_enabled: bool = True
    # Whether to send invoice-day reminders.
    invoice_reminder_notifications_enabled: bool = True
    # Weekday invoices go out (0=Monday .. 6=Sunday).
    invoice_day_of_week: int = DEFAULT_INVOICE_DAY
    # Admin-side "clear notifications" watermark timestamp; None when unset.
    admin_notifications_cleared_before: datetime | None = None
|
||||
|
||||
|
||||
@dataclass
class NotificationRunSummary:
    """Counters produced by one run_automatic_notifications() pass."""

    # Whether the global automatic-notification switch was on for this run.
    automatic_member_notifications_enabled: bool
    # Number of public-holiday messages actually created this run.
    public_holiday_messages_sent: int = 0
    # Number of invoice reminders actually created this run.
    invoice_reminders_sent: int = 0
|
||||
|
||||
|
||||
@dataclass
class NotificationTemplateSnapshot:
    """Resolved subject/body templates for each notification category."""

    # template_key -> {"subject": ..., "body": ...} for event-driven messages.
    automatic_messages: dict[str, dict[str, str]]
    # {"subject": ..., "body": ...} for public-holiday notices.
    public_holidays: dict[str, str]
    # {"subject": ..., "body": ...} for invoice reminders.
    invoice_reminders: dict[str, str]
|
||||
|
||||
|
||||
def _service_label(service_type: str | None) -> str:
    """Human-readable label for a service-type code; falls back to the raw code, then 'service'."""
    lookup_key = service_type or ""
    return SERVICE_LABELS.get(lookup_key, service_type or "service")
|
||||
|
||||
|
||||
def _weekday_label(weekday: int) -> str:
    """English weekday name for 0=Monday .. 6=Sunday; out-of-range falls back to the default invoice day."""
    names = ("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday")
    if 0 <= weekday < len(names):
        return names[weekday]
    return names[DEFAULT_INVOICE_DAY]
|
||||
|
||||
|
||||
def _format_nz_date(day: datetime | date, *, include_year: bool = False) -> str:
    """Format a date (or a datetime, converted to NZ time) as e.g. 'Monday 3 June [2025]'."""
    if isinstance(day, datetime):
        base = day.astimezone(NZ_TZ).date()
    else:
        base = day

    pieces = [base.strftime("%A"), str(base.day), base.strftime("%B")]
    if include_year:
        pieces.append(str(base.year))
    return " ".join(pieces)
|
||||
|
||||
|
||||
def _shift_single_holiday(day: date) -> date:
|
||||
if day.weekday() == 5:
|
||||
return day + timedelta(days=2)
|
||||
if day.weekday() == 6:
|
||||
return day + timedelta(days=1)
|
||||
return day
|
||||
|
||||
|
||||
def _observed_pair(day_one: date, day_two: date) -> list[date]:
|
||||
observed: list[date] = []
|
||||
|
||||
for actual in [day_one, day_two]:
|
||||
candidate = actual
|
||||
if candidate.weekday() >= 5:
|
||||
candidate += timedelta(days=7 - candidate.weekday())
|
||||
while candidate in observed:
|
||||
candidate += timedelta(days=1)
|
||||
observed.append(candidate)
|
||||
|
||||
return observed
|
||||
|
||||
|
||||
def _nth_weekday(year: int, month: int, weekday: int, occurrence: int) -> date:
|
||||
first = date(year, month, 1)
|
||||
offset = (weekday - first.weekday()) % 7
|
||||
return first + timedelta(days=offset + (occurrence - 1) * 7)
|
||||
|
||||
|
||||
def _easter_sunday(year: int) -> date:
|
||||
a = year % 19
|
||||
b = year // 100
|
||||
c = year % 100
|
||||
d = b // 4
|
||||
e = b % 4
|
||||
f = (b + 8) // 25
|
||||
g = (b - f + 1) // 3
|
||||
h = (19 * a + b - d - g + 15) % 30
|
||||
i = c // 4
|
||||
k = c % 4
|
||||
l = (32 + 2 * e + 2 * i - h - k) % 7
|
||||
m = (a + 11 * h + 22 * l) // 451
|
||||
month = (h + l - 7 * m + 114) // 31
|
||||
day = ((h + l - 7 * m + 114) % 31) + 1
|
||||
return date(year, month, day)
|
||||
|
||||
|
||||
def nz_public_holidays_for_year(year: int) -> dict[date, str]:
    """Map observed national NZ public holiday dates to their names for one year.

    Weekend holidays are Mondayised; the New Year and Christmas pairs avoid
    colliding on the same observed day. Matariki has no closed formula, so it
    comes from the hard-coded MATARIKI_DATES table (years missing from the
    table simply omit it).
    """
    easter = _easter_sunday(year)
    new_year, day_after = _observed_pair(date(year, 1, 1), date(year, 1, 2))
    christmas, boxing = _observed_pair(date(year, 12, 25), date(year, 12, 26))

    holidays = {
        new_year: "New Year's Day",
        day_after: "Day after New Year's Day",
        _shift_single_holiday(date(year, 2, 6)): "Waitangi Day",
        # Good Friday / Easter Monday bracket Easter Sunday and need no Mondayisation.
        easter - timedelta(days=2): "Good Friday",
        easter + timedelta(days=1): "Easter Monday",
        _shift_single_holiday(date(year, 4, 25)): "ANZAC Day",
        _nth_weekday(year, 6, 0, 1): "King's Birthday",   # first Monday of June
        _nth_weekday(year, 10, 0, 4): "Labour Day",        # fourth Monday of October
        christmas: "Christmas Day",
        boxing: "Boxing Day",
    }

    matariki = MATARIKI_DATES.get(year)
    if matariki is not None:
        holidays[matariki] = "Matariki"

    return holidays
|
||||
|
||||
|
||||
def nz_public_holiday_name(day: date) -> str | None:
    """Name of the NZ public holiday observed on `day`, or None if it is not one."""
    holidays = nz_public_holidays_for_year(day.year)
    return holidays.get(day)
|
||||
|
||||
|
||||
def _normalize_template_text(value: object, fallback: str) -> str:
|
||||
if isinstance(value, str):
|
||||
trimmed = value.strip()
|
||||
if trimmed:
|
||||
return trimmed
|
||||
return fallback
|
||||
|
||||
|
||||
def _normalize_subject_body_template(data: object, fallback: dict[str, str]) -> dict[str, str]:
|
||||
source = data if isinstance(data, dict) else {}
|
||||
return {
|
||||
"subject": _normalize_template_text(source.get("subject"), fallback["subject"]),
|
||||
"body": _normalize_template_text(source.get("body"), fallback["body"]),
|
||||
}
|
||||
|
||||
|
||||
async def get_notification_template_snapshot(db: AsyncSession) -> NotificationTemplateSnapshot:
    """Load all notification templates from stored sections, filling gaps with defaults."""
    automatic_section = await get_section(db, AUTOMATIC_NOTIFICATION_SECTION_KEY)
    # Start from a deep copy so stored overrides never mutate the module-level defaults.
    automatic_templates = deepcopy(DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES)
    automatic_source = automatic_section.get("templates") if isinstance(automatic_section, dict) else {}
    if isinstance(automatic_source, dict):
        # Normalize each known template key; unknown keys in storage are ignored.
        for template_key, fallback in DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES.items():
            automatic_templates[template_key] = _normalize_subject_body_template(
                automatic_source.get(template_key),
                fallback,
            )

    public_holiday_section = await get_section(db, PUBLIC_HOLIDAY_NOTIFICATION_SECTION_KEY)
    invoice_reminder_section = await get_section(db, INVOICE_REMINDER_NOTIFICATION_SECTION_KEY)

    return NotificationTemplateSnapshot(
        automatic_messages=automatic_templates,
        public_holidays=_normalize_subject_body_template(
            public_holiday_section,
            DEFAULT_PUBLIC_HOLIDAY_NOTIFICATION_TEMPLATE,
        ),
        invoice_reminders=_normalize_subject_body_template(
            invoice_reminder_section,
            DEFAULT_INVOICE_REMINDER_NOTIFICATION_TEMPLATE,
        ),
    )
|
||||
|
||||
|
||||
def _render_template_text(template: str, context: dict[str, object]) -> str:
|
||||
def replace(match: re.Match[str]) -> str:
|
||||
return str(context.get(match.group(1), ""))
|
||||
|
||||
return TEMPLATE_TOKEN_PATTERN.sub(replace, template).strip()
|
||||
|
||||
|
||||
def _render_subject_body_template(template: dict[str, str], context: dict[str, object]) -> tuple[str, str]:
    """Render a template's subject and body with the same context; returns (subject, body)."""
    subject = _render_template_text(template["subject"], context)
    body = _render_template_text(template["body"], context)
    return subject, body
|
||||
|
||||
|
||||
async def get_notification_settings_snapshot(db: AsyncSession) -> NotificationSettingsSnapshot:
    """Load persisted notification settings, falling back to defaults when no row exists."""
    settings_row = await get_settings(db)
    if settings_row is None:
        return NotificationSettingsSnapshot()

    return NotificationSettingsSnapshot(
        automatic_member_notifications_enabled=settings_row.automatic_member_notifications_enabled,
        nz_public_holiday_notifications_enabled=settings_row.nz_public_holiday_notifications_enabled,
        invoice_reminder_notifications_enabled=settings_row.invoice_reminder_notifications_enabled,
        invoice_day_of_week=settings_row.invoice_day_of_week,
        admin_notifications_cleared_before=settings_row.admin_notifications_cleared_before,
    )
|
||||
|
||||
|
||||
async def update_notification_settings_snapshot(
    db: AsyncSession,
    *,
    automatic_member_notifications_enabled: bool | None = None,
    nz_public_holiday_notifications_enabled: bool | None = None,
    invoice_reminder_notifications_enabled: bool | None = None,
    invoice_day_of_week: int | None = None,
    admin_notifications_cleared_before: datetime | None = None,
) -> NotificationSettingsSnapshot:
    """Partially update notification settings; None arguments leave fields untouched.

    Creates the SiteSettings row on first use. NOTE(review): because None means
    "no change", admin_notifications_cleared_before cannot be reset back to
    None through this function — confirm that is intended.

    Raises:
        ValueError: if invoice_day_of_week is outside 0..6 (Monday..Sunday).
    """
    row = await get_settings(db)
    if row is None:
        # Bootstrap an empty settings row so the updates below have a target.
        row = SiteSettings(site_name="")
        db.add(row)
        await db.flush()

    if automatic_member_notifications_enabled is not None:
        row.automatic_member_notifications_enabled = automatic_member_notifications_enabled
    if nz_public_holiday_notifications_enabled is not None:
        row.nz_public_holiday_notifications_enabled = nz_public_holiday_notifications_enabled
    if invoice_reminder_notifications_enabled is not None:
        row.invoice_reminder_notifications_enabled = invoice_reminder_notifications_enabled
    if invoice_day_of_week is not None:
        # Validate before assignment so an invalid value never reaches the row.
        if invoice_day_of_week < 0 or invoice_day_of_week > 6:
            raise ValueError("invoice_day_of_week must be between 0 and 6")
        row.invoice_day_of_week = invoice_day_of_week
    if admin_notifications_cleared_before is not None:
        row.admin_notifications_cleared_before = admin_notifications_cleared_before

    await db.flush()
    await db.refresh(row)
    return await get_notification_settings_snapshot(db)
|
||||
|
||||
|
||||
async def create_member_message(
    db: AsyncSession,
    *,
    member: Member,
    subject: str,
    body: str,
    sent_by: str = DEFAULT_SENT_BY,
    automatic: bool = False,
    dispatch_key: str | None = None,
    notification_type: str | None = None,
    respect_preferences: bool = True,
) -> AdminMessage | None:
    """Create an AdminMessage for a member, honouring feature flags and dedup keys.

    Returns the flushed message, or None when suppressed because:
    - the messages feature is disabled site-wide;
    - the member opted out of notifications (when respect_preferences is True);
    - automatic notifications are globally disabled (when automatic is True);
    - a dispatch with the same (member, dispatch_key) was already recorded.
    """
    feature_settings = await get_feature_settings_snapshot(db)
    if not feature_settings.messages_enabled:
        return None

    if respect_preferences and not member.notifications_enabled:
        return None

    if automatic:
        settings = await get_notification_settings_snapshot(db)
        if not settings.automatic_member_notifications_enabled:
            return None

    if dispatch_key is not None:
        # Idempotency guard: a (member_id, dispatch_key) pair is only ever sent once.
        existing = await db.execute(
            select(MemberNotificationDispatch).where(
                MemberNotificationDispatch.member_id == member.id,
                MemberNotificationDispatch.dispatch_key == dispatch_key,
            )
        )
        if existing.scalars().first() is not None:
            return None

        # Record the dispatch marker in the same unit of work as the message.
        db.add(
            MemberNotificationDispatch(
                member_id=member.id,
                notification_type=notification_type or "notification",
                dispatch_key=dispatch_key,
                metadata_json={"automatic": automatic},
            )
        )

    message = AdminMessage(
        member_id=member.id,
        subject=subject,
        body=body,
        sent_by=sent_by,
    )
    db.add(message)
    await db.flush()
    return message
|
||||
|
||||
|
||||
async def send_account_activated_notification(
    db: AsyncSession,
    member: Member,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send the "account activated" message to a member.

    Returns the created AdminMessage, or None when create_member_message
    suppresses it (feature off, opt-out, automation disabled, or already sent).
    """
    if templates is None:
        templates = await get_notification_template_snapshot(db)

    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
    }
    subject, body = _render_subject_body_template(
        templates.automatic_messages["member_activated"], context
    )

    activation_marker = member.activated_at.isoformat() if member.activated_at else "pending"
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"member_activated:{member.id}:{activation_marker}",
        notification_type="member_activated",
    )
|
||||
|
||||
|
||||
async def send_booking_status_notification(
    db: AsyncSession,
    member: Member,
    booking: Booking,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Notify a member that their booking was confirmed or cancelled.

    Any other status returns None (reschedules are handled by
    send_booking_rescheduled_notification). Deduplicated per (booking, status)
    via the dispatch key.
    """
    if booking.status not in {"confirmed", "cancelled"}:
        return None

    templates = templates or await get_notification_template_snapshot(db)
    # Fall back to neutral wording when no date is attached to the booking.
    requested = _format_nz_date(booking.requested_date, include_year=True) if booking.requested_date else "the requested date"
    service_name = _service_label(booking.service_type)

    if booking.status == "confirmed":
        template = templates.automatic_messages["booking_confirmed"]
    else:
        template = templates.automatic_messages["booking_cancelled"]

    subject, body = _render_subject_body_template(
        template,
        {
            "member_first_name": member.first_name,
            "member_last_name": member.last_name,
            "service_label": service_name,
            "requested_date_label": requested,
            "booking_status": booking.status,
        },
    )

    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"booking_status:{booking.id}:{booking.status}",
        notification_type="booking_status",
    )
|
||||
|
||||
|
||||
async def send_booking_rescheduled_notification(
    db: AsyncSession,
    member: Member,
    booking: Booking,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Tell a member their booking moved to a new date; deduped per (booking, new date)."""
    if templates is None:
        templates = await get_notification_template_snapshot(db)

    if booking.requested_date:
        date_label = _format_nz_date(booking.requested_date, include_year=True)
        date_marker = booking.requested_date.isoformat()
    else:
        date_label = "a new date"
        date_marker = "none"

    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "service_label": _service_label(booking.service_type),
        "requested_date_label": date_label,
    }
    subject, body = _render_subject_body_template(
        templates.automatic_messages["booking_rescheduled"], context
    )

    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"booking_rescheduled:{booking.id}:{date_marker}",
        notification_type="booking_rescheduled",
    )
|
||||
|
||||
|
||||
async def send_walk_completed_notification(
    db: AsyncSession,
    member: Member,
    walk: Walk,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Message a member when one of their walks is marked complete; deduped per walk id."""
    if templates is None:
        templates = await get_notification_template_snapshot(db)

    notes_text = walk.notes or ""
    notes_sentence = f" Notes from the team: {walk.notes}" if walk.notes else ""
    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "service_label": _service_label(walk.service_type),
        "walked_on_label": _format_nz_date(walk.walked_at),
        "walk_notes": notes_text,
        "walk_notes_sentence": notes_sentence,
    }
    subject, body = _render_subject_body_template(
        templates.automatic_messages["walk_completed"], context
    )

    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"walk_completed:{walk.id}",
        notification_type="walk_completed",
    )
|
||||
|
||||
|
||||
async def send_public_holiday_notification(
    db: AsyncSession,
    member: Member,
    holiday_date: date,
    holiday_name: str,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send a member the public-holiday notice; deduped per member per holiday date."""
    if templates is None:
        templates = await get_notification_template_snapshot(db)

    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "holiday_name": holiday_name,
        "holiday_date_label": _format_nz_date(holiday_date, include_year=True),
    }
    subject, body = _render_subject_body_template(templates.public_holidays, context)

    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"public_holiday:{holiday_date.isoformat()}",
        notification_type="public_holiday",
    )
|
||||
|
||||
|
||||
async def send_invoice_day_notification(
    db: AsyncSession,
    member: Member,
    invoice_date: date,
    weekday_label: str,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send a member the invoice-day reminder; deduped per member per invoice date."""
    if templates is None:
        templates = await get_notification_template_snapshot(db)

    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "weekday_label": weekday_label,
        "invoice_date_label": _format_nz_date(invoice_date),
    }
    subject, body = _render_subject_body_template(templates.invoice_reminders, context)

    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"invoice_reminder:{invoice_date.isoformat()}",
        notification_type="invoice_reminder",
    )
|
||||
|
||||
|
||||
async def run_automatic_notifications(
    db: AsyncSession,
    *,
    now: datetime | None = None,
) -> NotificationRunSummary:
    """Run one pass of date-driven automatic notifications (evaluated in NZ local time).

    Sends public-holiday notices when today is an NZ public holiday, and
    invoice reminders when today matches the configured invoice weekday, to all
    active members with notifications enabled. Per-member dedup happens inside
    the send helpers via dispatch keys, so re-running within the same day is safe.
    """
    settings = await get_notification_settings_snapshot(db)
    summary = NotificationRunSummary(
        automatic_member_notifications_enabled=settings.automatic_member_notifications_enabled,
    )

    # Global kill switch: report disabled and send nothing.
    if not settings.automatic_member_notifications_enabled:
        return summary

    templates = await get_notification_template_snapshot(db)

    # All date comparisons are done on the NZ-local calendar date, not UTC.
    local_now = (now or datetime.now(timezone.utc)).astimezone(NZ_TZ)
    local_date = local_now.date()

    result = await db.execute(
        select(Member).where(
            Member.is_active == True,  # noqa: E712
            Member.member_status == "active",
            Member.notifications_enabled == True,  # noqa: E712
        )
    )
    members = result.scalars().all()

    if settings.nz_public_holiday_notifications_enabled:
        holiday_name = nz_public_holiday_name(local_date)
        if holiday_name:
            for member in members:
                created = await send_public_holiday_notification(
                    db,
                    member,
                    local_date,
                    holiday_name,
                    templates=templates,
                )
                # None means suppressed (opt-out / already dispatched); don't count it.
                if created is not None:
                    summary.public_holiday_messages_sent += 1

    if settings.invoice_reminder_notifications_enabled and local_date.weekday() == settings.invoice_day_of_week:
        weekday_label = _weekday_label(settings.invoice_day_of_week)
        for member in members:
            created = await send_invoice_day_notification(
                db,
                member,
                local_date,
                weekday_label,
                templates=templates,
            )
            if created is not None:
                summary.invoice_reminders_sent += 1

    return summary
|
||||
|
||||
|
||||
async def notification_automation_loop() -> None:
    """Background task: run the automatic-notification pass once per interval, forever."""
    while True:
        try:
            async with AsyncSessionLocal() as session:
                await run_automatic_notifications(session)
                await session.commit()
        except asyncio.CancelledError:
            # Let task cancellation propagate so shutdown is not swallowed.
            raise
        except Exception:
            # Best-effort loop: log any other failure and keep running.
            logger.exception("Automatic member notification loop failed.")

        await asyncio.sleep(AUTOMATION_INTERVAL_SECONDS)
|
||||
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Service layer for Page CRUD operations.
|
||||
All DB queries are async; HTML body is sanitized on write.
|
||||
"""
|
||||
import nh3
|
||||
from typing import Optional
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.page import Page
|
||||
from app.schemas.page import PageCreate, PageUpdate
|
||||
|
||||
|
||||
def _sanitize_body(body: str) -> str:
    """Run user-supplied page HTML through nh3's default allow-list sanitizer."""
    sanitized = nh3.clean(body)
    return sanitized
|
||||
|
||||
|
||||
async def get_published_pages(db: AsyncSession) -> list[Page]:
    """All published pages, newest first."""
    stmt = (
        select(Page)
        .where(Page.published == True)  # noqa: E712
        .order_by(Page.created_at.desc())
    )
    rows = await db.execute(stmt)
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def get_page_by_slug(db: AsyncSession, slug: str, published_only: bool = True) -> Optional[Page]:
    """Look up a page by slug, optionally restricted to published pages; None if absent."""
    query = select(Page).where(Page.slug == slug)
    if published_only:
        query = query.where(Page.published == True)  # noqa: E712
    found = await db.execute(query)
    return found.scalars().first()
|
||||
|
||||
|
||||
async def create_page(db: AsyncSession, data: PageCreate) -> Page:
    """Persist a new page from validated input; the HTML body is sanitized first."""
    fields = {
        "title": data.title,
        "slug": data.slug,
        "body": _sanitize_body(data.body),
        "meta_title": data.meta_title,
        "meta_description": data.meta_description,
        "og_image_url": data.og_image_url,
        "published": data.published,
    }
    new_page = Page(**fields)
    db.add(new_page)
    await db.flush()
    await db.refresh(new_page)
    return new_page
|
||||
|
||||
|
||||
async def update_page(db: AsyncSession, slug: str, data: PageUpdate) -> Optional[Page]:
    """Apply a partial update to the page with the given slug; None when no such page."""
    page = await get_page_by_slug(db, slug, published_only=False)
    if page is None:
        return None

    changes = data.model_dump(exclude_unset=True)
    # Sanitize the body only when a non-None body was actually supplied.
    if changes.get("body") is not None:
        changes["body"] = _sanitize_body(changes["body"])

    for attr, new_value in changes.items():
        setattr(page, attr, new_value)

    await db.flush()
    await db.refresh(page)
    return page
|
||||
|
||||
|
||||
async def delete_page(db: AsyncSession, slug: str) -> bool:
    """Delete the page with the given slug. True when a page was removed, False otherwise."""
    target = await get_page_by_slug(db, slug, published_only=False)
    if target is None:
        return False
    await db.delete(target)
    await db.flush()
    return True
|
||||
@@ -0,0 +1,99 @@
|
||||
"""
|
||||
Service layer for BlogPost CRUD operations.
|
||||
"""
|
||||
import math
|
||||
import nh3
|
||||
from typing import Optional
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.post import BlogPost
|
||||
from app.schemas.post import PostCreate, PostUpdate, PaginatedPostsResponse, PostResponse
|
||||
|
||||
|
||||
def _sanitize_body(body: str) -> str:
    """Run untrusted post HTML through nh3's default allow-list sanitizer."""
    cleaned = nh3.clean(body)
    return cleaned
|
||||
|
||||
|
||||
async def get_published_posts(
    db: AsyncSession, page: int = 1, per_page: int = 10
) -> PaginatedPostsResponse:
    """Return one page of published posts, newest first.

    Args:
        db: Async database session.
        page: 1-based page number. Values below 1 are clamped to 1 so the
            query never emits a negative OFFSET (rejected by some databases).
        per_page: Page size; a non-positive value yields zero total_pages.

    Returns:
        PaginatedPostsResponse with the items plus pagination metadata
        (the reported page reflects any clamping).
    """
    # Guard: page=0 or a negative page would previously produce OFFSET < 0.
    page = max(page, 1)
    offset = (page - 1) * per_page

    count_result = await db.execute(
        select(func.count()).select_from(BlogPost).where(BlogPost.published == True)  # noqa: E712
    )
    total = count_result.scalar_one()

    result = await db.execute(
        select(BlogPost)
        .where(BlogPost.published == True)  # noqa: E712
        .order_by(BlogPost.created_at.desc())
        .offset(offset)
        .limit(per_page)
    )
    items = list(result.scalars().all())

    total_pages = math.ceil(total / per_page) if per_page > 0 else 0

    return PaginatedPostsResponse(
        items=[PostResponse.model_validate(p) for p in items],
        total=total,
        page=page,
        per_page=per_page,
        total_pages=total_pages,
    )
|
||||
|
||||
|
||||
async def get_post_by_slug(
    db: AsyncSession, slug: str, published_only: bool = True
) -> Optional[BlogPost]:
    """Fetch a blog post by slug, optionally restricted to published posts; None if absent."""
    query = select(BlogPost).where(BlogPost.slug == slug)
    if published_only:
        query = query.where(BlogPost.published == True)  # noqa: E712
    found = await db.execute(query)
    return found.scalars().first()
|
||||
|
||||
|
||||
async def create_post(db: AsyncSession, data: PostCreate) -> BlogPost:
    """Persist a new blog post, sanitizing the HTML body before storage."""
    # Copy the simple fields verbatim; only `body` needs transformation.
    field_values = {
        name: getattr(data, name)
        for name in (
            "title",
            "slug",
            "excerpt",
            "author",
            "featured_image_url",
            "tags",
            "published",
        )
    }
    field_values["body"] = _sanitize_body(data.body)

    new_post = BlogPost(**field_values)
    db.add(new_post)
    await db.flush()
    # Refresh so DB-generated columns (id, timestamps) are populated.
    await db.refresh(new_post)
    return new_post
|
||||
|
||||
|
||||
async def update_post(db: AsyncSession, slug: str, data: PostUpdate) -> Optional[BlogPost]:
    """
    Apply a partial update to the post identified by *slug*.

    Returns the updated post, or None when no post (draft or published)
    matches the slug. Only fields explicitly set on *data* are changed;
    a new body is sanitized before being stored.
    """
    post = await get_post_by_slug(db, slug, published_only=False)
    if post is None:
        return None

    changes = data.model_dump(exclude_unset=True)
    if changes.get("body") is not None:
        changes["body"] = _sanitize_body(changes["body"])

    for attr_name, new_value in changes.items():
        setattr(post, attr_name, new_value)

    await db.flush()
    await db.refresh(post)
    return post
|
||||
|
||||
|
||||
async def delete_post(db: AsyncSession, slug: str) -> bool:
    """Delete the post with *slug* (draft or published); report whether it existed."""
    target = await get_post_by_slug(db, slug, published_only=False)
    if target is None:
        return False

    await db.delete(target)
    await db.flush()
    return True
|
||||
@@ -0,0 +1,80 @@
|
||||
from copy import deepcopy
|
||||
|
||||
|
||||
SERVICE_PRICING_DEFAULTS = {
|
||||
"pack_walk": {
|
||||
"label": "Pack Walk",
|
||||
"amount": 58.0,
|
||||
"unit": "per walk",
|
||||
},
|
||||
"1_1_walk": {
|
||||
"label": "1-1 Walk",
|
||||
"amount": 45.0,
|
||||
"unit": "per walk",
|
||||
},
|
||||
"puppy_visit": {
|
||||
"label": "Puppy Visit",
|
||||
"amount": 39.0,
|
||||
"unit": "per visit",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def default_service_pricing() -> dict[str, dict[str, float | str]]:
|
||||
return deepcopy(SERVICE_PRICING_DEFAULTS)
|
||||
|
||||
|
||||
def normalize_service_pricing(data: object | None) -> dict[str, dict[str, float | str]]:
|
||||
normalized = default_service_pricing()
|
||||
source = data if isinstance(data, dict) else {}
|
||||
|
||||
for service_type, defaults in normalized.items():
|
||||
candidate = source.get(service_type) if isinstance(source, dict) else None
|
||||
if not isinstance(candidate, dict):
|
||||
continue
|
||||
|
||||
amount = candidate.get("amount")
|
||||
try:
|
||||
parsed_amount = round(float(amount), 2)
|
||||
except (TypeError, ValueError):
|
||||
parsed_amount = defaults["amount"]
|
||||
|
||||
if parsed_amount < 0:
|
||||
parsed_amount = defaults["amount"]
|
||||
|
||||
unit = candidate.get("unit")
|
||||
label = candidate.get("label")
|
||||
|
||||
normalized[service_type] = {
|
||||
"label": label.strip() if isinstance(label, str) and label.strip() else defaults["label"],
|
||||
"amount": parsed_amount,
|
||||
"unit": unit.strip() if isinstance(unit, str) and unit.strip() else defaults["unit"],
|
||||
}
|
||||
|
||||
return normalized
|
||||
|
||||
|
||||
def normalize_service_pricing_overrides(data: object | None) -> dict[str, float]:
|
||||
if not isinstance(data, dict):
|
||||
return {}
|
||||
|
||||
normalized: dict[str, float] = {}
|
||||
for service_type in SERVICE_PRICING_DEFAULTS:
|
||||
if service_type not in data:
|
||||
continue
|
||||
|
||||
value = data.get(service_type)
|
||||
if value in (None, ""):
|
||||
continue
|
||||
|
||||
try:
|
||||
parsed = round(float(value), 2)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
|
||||
if parsed < 0:
|
||||
continue
|
||||
|
||||
normalized[service_type] = parsed
|
||||
|
||||
return normalized
|
||||
@@ -0,0 +1,26 @@
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from app.models.section import ContentSection
|
||||
|
||||
|
||||
async def get_section(db: AsyncSession, key: str) -> dict | None:
    """Return the stored JSON payload for *key*, or None when no such section exists."""
    stmt = select(ContentSection).where(ContentSection.key == key)
    section = (await db.execute(stmt)).scalar_one_or_none()
    if section is None:
        return None
    return section.data
|
||||
|
||||
|
||||
async def upsert_section(db: AsyncSession, key: str, data: dict) -> ContentSection:
    """Create or replace the JSON payload stored under *key*; return the row."""
    stmt = select(ContentSection).where(ContentSection.key == key)
    section = (await db.execute(stmt)).scalar_one_or_none()

    if section is not None:
        section.data = data
    else:
        section = ContentSection(key=key, data=data)
        db.add(section)

    await db.flush()
    return section
|
||||
|
||||
|
||||
async def list_sections(db: AsyncSession) -> list[dict]:
    """List every section's key with its last-modified timestamp (ISO 8601), ordered by key."""
    rows = await db.execute(select(ContentSection).order_by(ContentSection.key))
    summaries = []
    for section in rows.scalars():
        summaries.append({"key": section.key, "updated_at": section.updated_at.isoformat()})
    return summaries
|
||||
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Service layer for SiteSettings singleton.
|
||||
Uses get-or-create pattern; only one row should ever exist.
|
||||
"""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
from sqlalchemy import inspect as sa_inspect, select
|
||||
from sqlalchemy.orm import load_only
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.settings import SiteSettings
|
||||
from app.schemas.settings import FeatureSettingsUpdate, SiteSettingsUpdate
|
||||
from app.services.pricing import default_service_pricing, normalize_service_pricing
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class FeatureSettingsSnapshot:
    """
    Immutable view of the site's feature toggles.

    Every flag defaults to True, so constructing the snapshot with no
    arguments (e.g. when no settings row exists yet) means "all features
    enabled".
    """

    # Each flag mirrors the site_settings column of the same name.
    bookings_enabled: bool = True
    walks_enabled: bool = True
    messages_enabled: bool = True
    two_factor_enabled: bool = True
    audit_history_enabled: bool = True
    experiments_enabled: bool = True
|
||||
|
||||
|
||||
class FeatureSettingsSchemaOutdatedError(RuntimeError):
    """
    Raised when feature settings are requested against an older schema.

    Signals that the site_settings table is missing one or more feature
    toggle columns; remedied by running `alembic upgrade head`.
    """


class ServicePricingSchemaOutdatedError(RuntimeError):
    """
    Raised when service pricing is requested against an older schema.

    Signals that the site_settings table is missing the service_pricing
    column; remedied by running `alembic upgrade head`.
    """
|
||||
|
||||
|
||||
async def _get_site_settings_column_names(db: AsyncSession) -> set[str]:
    """Inspect the live database and return the actual column names of site_settings."""
    connection = await db.connection()

    def _collect(sync_connection) -> set[str]:
        # SQLAlchemy reflection needs a sync connection, hence run_sync.
        inspector = sa_inspect(sync_connection)
        return {column["name"] for column in inspector.get_columns("site_settings")}

    return await connection.run_sync(_collect)
|
||||
|
||||
|
||||
async def get_settings(db: AsyncSession, *, existing_columns: set[str] | None = None) -> Optional[SiteSettings]:
    """
    Return the singleton SiteSettings row, or None when no row exists.

    To tolerate a live schema older than the ORM model, only columns that
    actually exist in the database are loaded (via load_only). Callers may
    pass *existing_columns* to avoid re-inspecting the schema.
    """
    columns = existing_columns or await _get_site_settings_column_names(db)
    mapped_attrs = [
        getattr(SiteSettings, name) for name in columns if hasattr(SiteSettings, name)
    ]

    stmt = select(SiteSettings).limit(1)
    if mapped_attrs:
        stmt = stmt.options(load_only(*mapped_attrs))

    return (await db.execute(stmt)).scalars().first()
|
||||
|
||||
|
||||
async def upsert_settings(db: AsyncSession, data: SiteSettingsUpdate) -> SiteSettings:
    """
    Create or update the singleton settings row from a partial payload.

    Only fields explicitly set on *data* are applied. When no row exists
    yet, the remaining fields are initialized from application defaults.
    """
    changes = data.model_dump(exclude_unset=True)
    settings_row = await get_settings(db)

    if settings_row is None:
        # First write: start from full application defaults, then overlay
        # whatever the caller explicitly provided.
        init_values = {
            "site_name": "",
            "tagline": None,
            "logo_url": None,
            "footer_text": None,
            "social_links": {},
            "automatic_member_notifications_enabled": True,
            "nz_public_holiday_notifications_enabled": True,
            "invoice_reminder_notifications_enabled": True,
            "invoice_day_of_week": 1,
            "admin_notifications_cleared_before": None,
            "bookings_enabled": True,
            "walks_enabled": True,
            "messages_enabled": True,
            "two_factor_enabled": True,
            "audit_history_enabled": True,
            "experiments_enabled": True,
            "service_pricing": default_service_pricing(),
        }
        init_values.update(changes)
        settings_row = SiteSettings(**init_values)
        db.add(settings_row)
    else:
        for field_name, value in changes.items():
            setattr(settings_row, field_name, value)

    await db.flush()
    await db.refresh(settings_row)
    return settings_row
|
||||
|
||||
|
||||
async def get_feature_settings_snapshot(db: AsyncSession) -> FeatureSettingsSnapshot:
    """
    Read the current feature toggles, tolerating older schemas.

    Any toggle whose backing column does not exist yet — or the whole set,
    when no settings row exists — is reported as enabled (True).
    """
    existing_columns = await _get_site_settings_column_names(db)
    row = await get_settings(db, existing_columns=existing_columns)
    if row is None:
        return FeatureSettingsSnapshot()

    flag_names = (
        "bookings_enabled",
        "walks_enabled",
        "messages_enabled",
        "two_factor_enabled",
        "audit_history_enabled",
        "experiments_enabled",
    )
    # Fall back to True for any flag whose column predates this code path.
    flags = {
        name: (getattr(row, name, True) if name in existing_columns else True)
        for name in flag_names
    }
    return FeatureSettingsSnapshot(**flags)
|
||||
|
||||
|
||||
async def update_feature_settings_snapshot(
    db: AsyncSession,
    data: FeatureSettingsUpdate,
) -> FeatureSettingsSnapshot:
    """
    Persist feature-toggle changes and return the resulting snapshot.

    Raises:
        FeatureSettingsSchemaOutdatedError: if any requested toggle has no
            backing column in site_settings (schema migration required).
    """
    existing_columns = await _get_site_settings_column_names(db)
    changes = data.model_dump(exclude_unset=True)

    missing_columns = sorted(set(changes) - existing_columns)
    if missing_columns:
        raise FeatureSettingsSchemaOutdatedError(
            "Feature toggle columns are missing from site_settings. Run alembic upgrade head."
        )

    row = await get_settings(db, existing_columns=existing_columns)
    if row is None:
        # No settings row yet — create a minimal one to carry the flags.
        row = SiteSettings(site_name="")
        db.add(row)
        await db.flush()

    for field_name, value in changes.items():
        setattr(row, field_name, value)

    await db.flush()
    await db.refresh(row)
    # Re-read through the snapshot helper so callers get the canonical view.
    return await get_feature_settings_snapshot(db)
|
||||
|
||||
|
||||
async def get_service_pricing_snapshot(db: AsyncSession) -> dict[str, dict[str, float | str]]:
    """
    Return the effective service price table.

    Falls back to the built-in defaults when no settings row exists or the
    schema predates the service_pricing column; otherwise the stored value
    is normalized before being returned.
    """
    existing_columns = await _get_site_settings_column_names(db)
    row = await get_settings(db, existing_columns=existing_columns)

    if row is None or "service_pricing" not in existing_columns:
        return default_service_pricing()

    return normalize_service_pricing(getattr(row, "service_pricing", None))
|
||||
|
||||
|
||||
async def update_service_pricing_snapshot(
    db: AsyncSession,
    *,
    service_pricing: dict,
) -> dict[str, dict[str, float | str]]:
    """
    Validate and persist a new service price table, returning the result.

    The incoming dict is normalized (unknown keys dropped, bad values
    replaced with defaults) before being stored. The return value is
    re-read through get_service_pricing_snapshot so callers always see
    the canonical, normalized form.

    Raises:
        ServicePricingSchemaOutdatedError: if the service_pricing column is
            missing from site_settings; run `alembic upgrade head`.
    """
    existing_columns = await _get_site_settings_column_names(db)
    if "service_pricing" not in existing_columns:
        raise ServicePricingSchemaOutdatedError(
            "Service pricing columns are missing from site_settings. Run alembic upgrade head."
        )

    row = await get_settings(db, existing_columns=existing_columns)
    if row is None:
        # No settings row yet: create the singleton with default pricing so
        # the assignment below has a persistent target.
        row = SiteSettings(site_name="", service_pricing=default_service_pricing())
        db.add(row)
        await db.flush()

    row.service_pricing = normalize_service_pricing(service_pricing)
    await db.flush()
    await db.refresh(row)
    return await get_service_pricing_snapshot(db)
|
||||
Reference in New Issue
Block a user