This commit is contained in:
ponzischeme89
2026-04-18 07:23:55 +12:00
parent f210020772
commit 6d44e05de4
396 changed files with 75296 additions and 0 deletions
+7
View File
@@ -0,0 +1,7 @@
# Container image for the FastAPI backend, served by uvicorn on port 8000.
FROM python:3.12-slim
WORKDIR /app
# Copy requirements first so the pip layer is cached when only app code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy the rest of the application source.
COPY . .
EXPOSE 8000
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+44
View File
@@ -0,0 +1,44 @@
# Alembic configuration: migration script location plus Python logging setup.
[alembic]
script_location = alembic
prepend_sys_path = .
version_path_separator = os
# Default database URL — override via env var DATABASE_URL or by setting
# sqlalchemy.url here. We use env.py to read from app.config instead.
# (This placeholder value is never used at runtime — env.py overwrites it.)
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# Standard Python logging config consumed by logging.config.fileConfig in env.py.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
# SQLAlchemy engine logging: WARN keeps SQL echo quiet during migrations.
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
+74
View File
@@ -0,0 +1,74 @@
"""Alembic migration environment.

Wires Alembic to the application's settings and model metadata, and
supports both offline (SQL-script) and online (async engine) modes.
"""
import asyncio
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Make sure the app package is importable (repo root = parent of this file's dir)
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Import app settings and models so autogenerate can see them
# (must come AFTER the sys.path insert above).
from app.config import settings
from app.models.base import Base
# Import all models to register them with Base.metadata
import app.models  # noqa: F401
# Alembic Config object — gives access to alembic.ini values
config = context.config
# Override sqlalchemy.url from our app settings, replacing the
# placeholder value in alembic.ini.
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)
# Interpret the config file for Python logging
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# Metadata that autogenerate diffs the live schema against.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Emit migration SQL from the configured URL without connecting.

    Offline mode renders literal SQL statements ('literal_binds') so the
    output can be captured as a script and run by a DBA.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Run migrations on an already-open sync connection.

    Passed to ``AsyncConnection.run_sync`` by the async runner below.
    """
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Run migrations using an async engine built from the ini section.

    NullPool avoids holding connections open past the migration run.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = async_engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with engine.connect() as conn:
        await conn.run_sync(do_run_migrations)
    # Explicitly release the engine's resources once migrations finish.
    await engine.dispose()
def run_migrations_online() -> None:
    """Entry point for 'online' mode: drive the async migration runner."""
    asyncio.run(run_async_migrations())


# Dispatch on the mode Alembic was invoked in.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()
+26
View File
@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}
@@ -0,0 +1,40 @@
"""enrich analytics events
Revision ID: 3419d4e56131
Revises: 4f2e3f915e09
Create Date: 2026-03-29 23:29:47.836569
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '3419d4e56131'
down_revision: Union[str, None] = '4f2e3f915e09'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('analytics_events', sa.Column('ip_partial', sa.String(length=24), nullable=True))
op.add_column('analytics_events', sa.Column('user_agent', sa.String(length=512), nullable=True))
op.add_column('analytics_events', sa.Column('browser', sa.String(length=100), nullable=True))
op.add_column('analytics_events', sa.Column('os_name', sa.String(length=100), nullable=True))
op.add_column('analytics_events', sa.Column('country', sa.String(length=100), nullable=True))
op.add_column('analytics_events', sa.Column('city', sa.String(length=100), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('analytics_events', 'city')
op.drop_column('analytics_events', 'country')
op.drop_column('analytics_events', 'os_name')
op.drop_column('analytics_events', 'browser')
op.drop_column('analytics_events', 'user_agent')
op.drop_column('analytics_events', 'ip_partial')
# ### end Alembic commands ###
@@ -0,0 +1,48 @@
"""add analytics events
Revision ID: 4f2e3f915e09
Revises: 5881f111a194
Create Date: 2026-03-29 23:22:22.884950
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '4f2e3f915e09'
down_revision: Union[str, None] = '5881f111a194'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('analytics_events',
sa.Column('event_type', sa.String(length=64), nullable=False),
sa.Column('page', sa.String(length=255), nullable=False),
sa.Column('element', sa.String(length=255), nullable=True),
sa.Column('metadata', sa.JSON(), nullable=True),
sa.Column('session_id', sa.String(length=64), nullable=False),
sa.Column('ip_hash', sa.String(length=64), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_analytics_events_created_at'), 'analytics_events', ['created_at'], unique=False)
op.create_index(op.f('ix_analytics_events_event_type'), 'analytics_events', ['event_type'], unique=False)
op.create_index(op.f('ix_analytics_events_page'), 'analytics_events', ['page'], unique=False)
op.create_index(op.f('ix_analytics_events_session_id'), 'analytics_events', ['session_id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_analytics_events_session_id'), table_name='analytics_events')
op.drop_index(op.f('ix_analytics_events_page'), table_name='analytics_events')
op.drop_index(op.f('ix_analytics_events_event_type'), table_name='analytics_events')
op.drop_index(op.f('ix_analytics_events_created_at'), table_name='analytics_events')
op.drop_table('analytics_events')
# ### end Alembic commands ###
@@ -0,0 +1,107 @@
"""initial
Revision ID: 5881f111a194
Revises:
Create Date: 2026-03-29 17:31:46.624084
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '5881f111a194'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('blog_posts',
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('slug', sa.String(length=255), nullable=False),
sa.Column('excerpt', sa.Text(), nullable=True),
sa.Column('body', sa.Text(), nullable=False),
sa.Column('author', sa.String(length=255), nullable=True),
sa.Column('featured_image_url', sa.String(length=2048), nullable=True),
sa.Column('tags', sa.JSON(), nullable=False),
sa.Column('published', sa.Boolean(), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug')
)
op.create_index('ix_blog_posts_slug', 'blog_posts', ['slug'], unique=False)
op.create_table('content_sections',
sa.Column('key', sa.Text(), nullable=False),
sa.Column('data', sa.JSON().with_variant(postgresql.JSONB(astext_type=sa.Text()), 'postgresql'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('key')
)
op.create_table('pages',
sa.Column('title', sa.String(length=255), nullable=False),
sa.Column('slug', sa.String(length=255), nullable=False),
sa.Column('body', sa.Text(), nullable=False),
sa.Column('meta_title', sa.String(length=255), nullable=True),
sa.Column('meta_description', sa.String(length=500), nullable=True),
sa.Column('og_image_url', sa.String(length=2048), nullable=True),
sa.Column('published', sa.Boolean(), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('slug')
)
op.create_index('ix_pages_slug', 'pages', ['slug'], unique=False)
op.create_table('site_settings',
sa.Column('site_name', sa.String(length=255), nullable=False),
sa.Column('tagline', sa.String(length=500), nullable=True),
sa.Column('logo_url', sa.String(length=2048), nullable=True),
sa.Column('footer_text', sa.Text(), nullable=True),
sa.Column('social_links', sa.JSON(), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('users',
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('hashed_password', sa.String(length=255), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_table('refresh_tokens',
sa.Column('user_id', sa.Uuid(), nullable=False),
sa.Column('token_hash', sa.String(length=255), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('revoked', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_refresh_tokens_user_id'), 'refresh_tokens', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_refresh_tokens_user_id'), table_name='refresh_tokens')
op.drop_table('refresh_tokens')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_table('users')
op.drop_table('site_settings')
op.drop_index('ix_pages_slug', table_name='pages')
op.drop_table('pages')
op.drop_table('content_sections')
op.drop_index('ix_blog_posts_slug', table_name='blog_posts')
op.drop_table('blog_posts')
# ### end Alembic commands ###
@@ -0,0 +1,37 @@
"""add member feature flags
Revision ID: 6c3f4e2a1b90
Revises: f25d0f745a17
Create Date: 2026-04-07 18:25:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "6c3f4e2a1b90"
down_revision = "f25d0f745a17"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"site_settings",
sa.Column("bookings_enabled", sa.Boolean(), nullable=False, server_default=sa.true()),
)
op.add_column(
"site_settings",
sa.Column("walks_enabled", sa.Boolean(), nullable=False, server_default=sa.true()),
)
op.add_column(
"site_settings",
sa.Column("messages_enabled", sa.Boolean(), nullable=False, server_default=sa.true()),
)
def downgrade() -> None:
op.drop_column("site_settings", "messages_enabled")
op.drop_column("site_settings", "walks_enabled")
op.drop_column("site_settings", "bookings_enabled")
@@ -0,0 +1,22 @@
"""merge feature flags and admin notifications heads
Revision ID: 8b1a2c7d9e4f
Revises: 6c3f4e2a1b90, d4f6a2b1c9e8
Create Date: 2026-04-07 22:15:00.000000
"""
from typing import Sequence, Union
revision: str = "8b1a2c7d9e4f"
down_revision: Union[str, tuple[str, str], None] = ("6c3f4e2a1b90", "d4f6a2b1c9e8")
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass
@@ -0,0 +1,76 @@
"""add member notifications
Revision ID: 9d3c5b7a1f2e
Revises: e2a1f9c4b6d3, f25d0f745a17
Create Date: 2026-04-01 11:30:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "9d3c5b7a1f2e"
down_revision: Union[str, tuple[str, str], None] = ("e2a1f9c4b6d3", "f25d0f745a17")
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column("members", sa.Column("notifications_enabled", sa.Boolean(), nullable=False, server_default=sa.true()))
op.add_column(
"site_settings",
sa.Column("automatic_member_notifications_enabled", sa.Boolean(), nullable=False, server_default=sa.true()),
)
op.add_column(
"site_settings",
sa.Column("nz_public_holiday_notifications_enabled", sa.Boolean(), nullable=False, server_default=sa.true()),
)
op.add_column(
"site_settings",
sa.Column("invoice_reminder_notifications_enabled", sa.Boolean(), nullable=False, server_default=sa.true()),
)
op.add_column(
"site_settings",
sa.Column("invoice_day_of_week", sa.Integer(), nullable=False, server_default="1"),
)
op.create_table(
"member_notification_dispatches",
sa.Column("member_id", sa.Uuid(), nullable=False),
sa.Column("notification_type", sa.String(length=64), nullable=False),
sa.Column("dispatch_key", sa.String(length=255), nullable=False),
sa.Column("metadata", sa.JSON(), nullable=True),
sa.Column("id", sa.Uuid(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.ForeignKeyConstraint(["member_id"], ["members.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("member_id", "dispatch_key", name="uq_member_notification_dispatches_member_key"),
)
op.create_index(
op.f("ix_member_notification_dispatches_member_id"),
"member_notification_dispatches",
["member_id"],
unique=False,
)
op.create_index(
op.f("ix_member_notification_dispatches_notification_type"),
"member_notification_dispatches",
["notification_type"],
unique=False,
)
def downgrade() -> None:
op.drop_index(op.f("ix_member_notification_dispatches_notification_type"), table_name="member_notification_dispatches")
op.drop_index(op.f("ix_member_notification_dispatches_member_id"), table_name="member_notification_dispatches")
op.drop_table("member_notification_dispatches")
op.drop_column("site_settings", "invoice_day_of_week")
op.drop_column("site_settings", "invoice_reminder_notifications_enabled")
op.drop_column("site_settings", "nz_public_holiday_notifications_enabled")
op.drop_column("site_settings", "automatic_member_notifications_enabled")
op.drop_column("members", "notifications_enabled")
@@ -0,0 +1,126 @@
"""add members area tables
Revision ID: a1b2c3d4e5f6
Revises: f25d0f745a17
Create Date: 2026-03-31 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = 'a1b2c3d4e5f6'
down_revision: Union[str, None] = 'f25d0f745a17'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
'members',
sa.Column('email', sa.String(255), nullable=False),
sa.Column('hashed_password', sa.String(255), nullable=True),
sa.Column('first_name', sa.String(100), nullable=False),
sa.Column('last_name', sa.String(100), nullable=False),
sa.Column('phone', sa.String(50), nullable=True),
sa.Column('address', sa.String(500), nullable=True),
sa.Column('emergency_contact', sa.String(255), nullable=True),
sa.Column('is_claimed', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('is_active', sa.Boolean(), nullable=False, server_default='true'),
sa.Column('onboarding_data', sa.JSON(), nullable=True),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_members_email'), 'members', ['email'], unique=True)
op.create_table(
'member_verification_codes',
sa.Column('member_id', sa.Uuid(), nullable=False),
sa.Column('code_hash', sa.String(255), nullable=False),
sa.Column('purpose', sa.String(20), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('used_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.ForeignKeyConstraint(['member_id'], ['members.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_member_verification_codes_member_id'), 'member_verification_codes', ['member_id'], unique=False)
op.create_table(
'member_refresh_tokens',
sa.Column('member_id', sa.Uuid(), nullable=False),
sa.Column('token_hash', sa.String(255), nullable=False),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('revoked', sa.Boolean(), nullable=False, server_default='false'),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.ForeignKeyConstraint(['member_id'], ['members.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_member_refresh_tokens_member_id'), 'member_refresh_tokens', ['member_id'], unique=False)
op.create_table(
'walks',
sa.Column('member_id', sa.Uuid(), nullable=False),
sa.Column('walked_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('service_type', sa.String(50), nullable=False),
sa.Column('duration_minutes', sa.Integer(), nullable=False, server_default='60'),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('recorded_by', sa.String(255), nullable=True),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['member_id'], ['members.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_walks_member_id'), 'walks', ['member_id'], unique=False)
op.create_table(
'bookings',
sa.Column('member_id', sa.Uuid(), nullable=False),
sa.Column('service_type', sa.String(50), nullable=False),
sa.Column('requested_date', sa.DateTime(timezone=True), nullable=True),
sa.Column('status', sa.String(20), nullable=False, server_default='pending'),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('admin_notes', sa.Text(), nullable=True),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['member_id'], ['members.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_bookings_member_id'), 'bookings', ['member_id'], unique=False)
op.create_table(
'admin_messages',
sa.Column('member_id', sa.Uuid(), nullable=False),
sa.Column('subject', sa.String(255), nullable=False),
sa.Column('body', sa.Text(), nullable=False),
sa.Column('read_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('sent_by', sa.String(255), nullable=True),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.ForeignKeyConstraint(['member_id'], ['members.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_admin_messages_member_id'), 'admin_messages', ['member_id'], unique=False)
def downgrade() -> None:
op.drop_index(op.f('ix_admin_messages_member_id'), table_name='admin_messages')
op.drop_table('admin_messages')
op.drop_index(op.f('ix_bookings_member_id'), table_name='bookings')
op.drop_table('bookings')
op.drop_index(op.f('ix_walks_member_id'), table_name='walks')
op.drop_table('walks')
op.drop_index(op.f('ix_member_refresh_tokens_member_id'), table_name='member_refresh_tokens')
op.drop_table('member_refresh_tokens')
op.drop_index(op.f('ix_member_verification_codes_member_id'), table_name='member_verification_codes')
op.drop_table('member_verification_codes')
op.drop_index(op.f('ix_members_email'), table_name='members')
op.drop_table('members')
@@ -0,0 +1,38 @@
"""add service pricing and member security controls
Revision ID: a4d9c7e18b21
Revises: f9c2d7a14b6e
Create Date: 2026-04-08 12:15:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "a4d9c7e18b21"
down_revision: Union[str, None] = "f9c2d7a14b6e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column(
"site_settings",
sa.Column("service_pricing", sa.JSON(), nullable=False, server_default=sa.text("'{}'")),
)
op.add_column(
"members",
sa.Column("service_pricing_overrides", sa.JSON(), nullable=False, server_default=sa.text("'{}'")),
)
op.add_column(
"members",
sa.Column("force_two_factor", sa.Boolean(), nullable=True),
)
def downgrade() -> None:
op.drop_column("members", "force_two_factor")
op.drop_column("members", "service_pricing_overrides")
op.drop_column("site_settings", "service_pricing")
@@ -0,0 +1,53 @@
"""add message soft-delete and member reply
Revision ID: a7f3e2c1b8d4
Revises: f9c2d7a14b6e
Create Date: 2026-04-09 10:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "a7f3e2c1b8d4"
down_revision: Union[str, None] = "f9c2d7a14b6e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Soft-delete support for admin_messages (member can dismiss/delete)
op.add_column(
"admin_messages",
sa.Column("deleted_at", sa.DateTime(timezone=True), nullable=True),
)
# Member reply messages — stored alongside admin messages in same table,
# distinguished by direction field.
op.add_column(
"admin_messages",
sa.Column(
"direction",
sa.String(16),
nullable=False,
server_default="inbound",
),
)
# reply_to_id links a member reply back to the original admin message
op.add_column(
"admin_messages",
sa.Column(
"reply_to_id",
sa.Uuid(as_uuid=True),
sa.ForeignKey("admin_messages.id", ondelete="SET NULL"),
nullable=True,
),
)
def downgrade() -> None:
op.drop_column("admin_messages", "reply_to_id")
op.drop_column("admin_messages", "direction")
op.drop_column("admin_messages", "deleted_at")
@@ -0,0 +1,22 @@
"""merge booking indexes and message reply heads
Revision ID: b2d4f1e8c9a3
Revises: c1e4b8f2a7d9, a7f3e2c1b8d4
Create Date: 2026-04-09 11:00:00.000000
"""
from typing import Sequence, Union
revision: str = "b2d4f1e8c9a3"
down_revision: Union[str, tuple[str, str], None] = ("c1e4b8f2a7d9", "a7f3e2c1b8d4")
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass
@@ -0,0 +1,52 @@
"""add audit logs
Revision ID: b3e7c9a2f1d4
Revises: 9d3c5b7a1f2e
Create Date: 2026-04-01 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "b3e7c9a2f1d4"
down_revision: Union[str, None] = "9d3c5b7a1f2e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
"audit_logs",
sa.Column("id", sa.Uuid(), nullable=False),
sa.Column("timestamp", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
sa.Column("member_id", sa.Uuid(), nullable=True),
sa.Column("member_email", sa.String(length=255), nullable=True),
sa.Column("action_type", sa.String(length=64), nullable=False),
sa.Column("area", sa.String(length=255), nullable=False),
sa.Column("description", sa.String(length=500), nullable=False),
sa.Column("status", sa.String(length=16), nullable=False, server_default="success"),
sa.Column("booking_id", sa.Uuid(), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column("error_detail", sa.Text(), nullable=True),
sa.Column("ip_address", sa.String(length=64), nullable=True),
sa.Column("user_agent", sa.String(length=512), nullable=True),
sa.Column("extra", sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(["member_id"], ["members.id"], ondelete="SET NULL"),
sa.ForeignKeyConstraint(["booking_id"], ["bookings.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index("ix_audit_logs_timestamp", "audit_logs", ["timestamp"], unique=False)
op.create_index("ix_audit_logs_member_id", "audit_logs", ["member_id"], unique=False)
op.create_index("ix_audit_logs_action_type", "audit_logs", ["action_type"], unique=False)
op.create_index("ix_audit_logs_status", "audit_logs", ["status"], unique=False)
def downgrade() -> None:
op.drop_index("ix_audit_logs_status", table_name="audit_logs")
op.drop_index("ix_audit_logs_action_type", table_name="audit_logs")
op.drop_index("ix_audit_logs_member_id", table_name="audit_logs")
op.drop_index("ix_audit_logs_timestamp", table_name="audit_logs")
op.drop_table("audit_logs")
@@ -0,0 +1,96 @@
"""add experiments
Revision ID: bd9f6a8b7c1d
Revises: 3419d4e56131
Create Date: 2026-03-30 23:40:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'bd9f6a8b7c1d'
down_revision: Union[str, None] = '3419d4e56131'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
'experiments',
sa.Column('experiment_key', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=120), nullable=False),
sa.Column('description', sa.String(length=512), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('eligible_routes', sa.JSON(), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('experiment_key'),
)
op.create_index(op.f('ix_experiments_enabled'), 'experiments', ['enabled'], unique=False)
op.create_index(op.f('ix_experiments_experiment_key'), 'experiments', ['experiment_key'], unique=False)
op.create_table(
'experiment_variants',
sa.Column('experiment_id', sa.Uuid(), nullable=False),
sa.Column('variant_key', sa.String(length=64), nullable=False),
sa.Column('label', sa.String(length=120), nullable=False),
sa.Column('allocation', sa.Integer(), nullable=False),
sa.Column('is_control', sa.Boolean(), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.ForeignKeyConstraint(['experiment_id'], ['experiments.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('experiment_id', 'variant_key', name='uq_experiment_variants_experiment_variant'),
)
op.create_index(op.f('ix_experiment_variants_experiment_id'), 'experiment_variants', ['experiment_id'], unique=False)
op.create_table(
'experiment_events',
sa.Column('experiment_key', sa.String(length=64), nullable=False),
sa.Column('variant_key', sa.String(length=64), nullable=False),
sa.Column('session_id', sa.String(length=128), nullable=False),
sa.Column('user_id', sa.String(length=64), nullable=True),
sa.Column('path', sa.String(length=255), nullable=False),
sa.Column('event_type', sa.String(length=64), nullable=False),
sa.Column('conversion_value', sa.Numeric(precision=12, scale=2), nullable=True),
sa.Column('metadata', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=False),
sa.Column('id', sa.Uuid(), nullable=False),
sa.PrimaryKeyConstraint('id'),
)
op.create_index(op.f('ix_experiment_events_created_at'), 'experiment_events', ['created_at'], unique=False)
op.create_index(op.f('ix_experiment_events_event_type'), 'experiment_events', ['event_type'], unique=False)
op.create_index(op.f('ix_experiment_events_experiment_key'), 'experiment_events', ['experiment_key'], unique=False)
op.create_index(op.f('ix_experiment_events_path'), 'experiment_events', ['path'], unique=False)
op.create_index(op.f('ix_experiment_events_session_id'), 'experiment_events', ['session_id'], unique=False)
op.create_index(op.f('ix_experiment_events_user_id'), 'experiment_events', ['user_id'], unique=False)
op.create_index(op.f('ix_experiment_events_variant_key'), 'experiment_events', ['variant_key'], unique=False)
op.create_index('ix_experiment_events_experiment_variant_created_at', 'experiment_events', ['experiment_key', 'variant_key', 'created_at'], unique=False)
op.create_index('ix_experiment_events_session_created_at', 'experiment_events', ['session_id', 'created_at'], unique=False)
def downgrade() -> None:
op.drop_index('ix_experiment_events_session_created_at', table_name='experiment_events')
op.drop_index('ix_experiment_events_experiment_variant_created_at', table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_variant_key'), table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_user_id'), table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_session_id'), table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_path'), table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_experiment_key'), table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_event_type'), table_name='experiment_events')
op.drop_index(op.f('ix_experiment_events_created_at'), table_name='experiment_events')
op.drop_table('experiment_events')
op.drop_index(op.f('ix_experiment_variants_experiment_id'), table_name='experiment_variants')
op.drop_table('experiment_variants')
op.drop_index(op.f('ix_experiments_experiment_key'), table_name='experiments')
op.drop_index(op.f('ix_experiments_enabled'), table_name='experiments')
op.drop_table('experiments')
@@ -0,0 +1,36 @@
"""add booking performance indexes
Revision ID: c1e4b8f2a7d9
Revises: a4d9c7e18b21
Create Date: 2026-04-08 14:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
revision: str = 'c1e4b8f2a7d9'
down_revision: Union[str, None] = 'a4d9c7e18b21'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add indexes backing the hottest booking query patterns."""
    booking_indexes = [
        # Admin list view filters by status more than any other column.
        ('ix_bookings_status', ['status']),
        # Schedule view runs date-range scans ordered by requested_date.
        ('ix_bookings_requested_date', ['requested_date']),
        # SSE signature query does max(updated_at); index avoids a table scan.
        ('ix_bookings_updated_at', ['updated_at']),
        # Member-facing /members/bookings filters by member + status together.
        ('ix_bookings_member_id_status', ['member_id', 'status']),
    ]
    for index_name, columns in booking_indexes:
        op.create_index(index_name, 'bookings', columns)
def downgrade() -> None:
    """Drop the booking indexes in reverse creation order."""
    for index_name in (
        'ix_bookings_member_id_status',
        'ix_bookings_updated_at',
        'ix_bookings_requested_date',
        'ix_bookings_status',
    ):
        op.drop_index(index_name, table_name='bookings')
@@ -0,0 +1,43 @@
"""add member onboarding lifecycle
Revision ID: c7d2b6f4a9e1
Revises: a1b2c3d4e5f6
Create Date: 2026-03-31 23:25:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "c7d2b6f4a9e1"
down_revision: Union[str, None] = "a1b2c3d4e5f6"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # member_status gets a temporary server default so existing rows satisfy
    # NOT NULL during the ALTER; the default is dropped again below.
    op.add_column("members", sa.Column("member_status", sa.String(length=32), nullable=False, server_default="invited"))
    op.add_column("members", sa.Column("claimed_at", sa.DateTime(timezone=True), nullable=True))
    op.add_column("members", sa.Column("onboarding_completed_at", sa.DateTime(timezone=True), nullable=True))
    op.add_column("members", sa.Column("contract_signed_at", sa.DateTime(timezone=True), nullable=True))
    op.add_column("members", sa.Column("contract_signer_name", sa.String(length=255), nullable=True))
    op.add_column("members", sa.Column("contract_version", sa.String(length=50), nullable=True))
    op.add_column("members", sa.Column("activated_at", sa.DateTime(timezone=True), nullable=True))
    op.create_index(op.f("ix_members_member_status"), "members", ["member_status"], unique=False)
    # Backfill: map the old boolean is_claimed flag onto the new lifecycle.
    op.execute("UPDATE members SET member_status = 'active' WHERE is_claimed = true")
    op.execute("UPDATE members SET member_status = 'invited' WHERE is_claimed = false")
    # Remove the bootstrap default — application code sets the status explicitly.
    op.alter_column("members", "member_status", server_default=None)
def downgrade() -> None:
    """Remove the lifecycle columns, reversing the upgrade order."""
    op.drop_index(op.f("ix_members_member_status"), table_name="members")
    for column in (
        "activated_at",
        "contract_version",
        "contract_signer_name",
        "contract_signed_at",
        "onboarding_completed_at",
        "claimed_at",
        "member_status",
    ):
        op.drop_column("members", column)
@@ -0,0 +1,28 @@
"""add admin notifications clear cutoff
Revision ID: d4f6a2b1c9e8
Revises: b3e7c9a2f1d4
Create Date: 2026-04-07 20:45:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "d4f6a2b1c9e8"
down_revision: Union[str, None] = "b3e7c9a2f1d4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add the nullable timestamp marking when admin notifications were last cleared."""
    cutoff_column = sa.Column(
        "admin_notifications_cleared_before", sa.DateTime(timezone=True), nullable=True
    )
    op.add_column("site_settings", cutoff_column)
def downgrade() -> None:
    # Dropping the column discards the stored clear-cutoff timestamp.
    op.drop_column("site_settings", "admin_notifications_cleared_before")
@@ -0,0 +1,53 @@
"""add contact leads
Revision ID: e2a1f9c4b6d3
Revises: c7d2b6f4a9e1
Create Date: 2026-03-31 23:55:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "e2a1f9c4b6d3"
down_revision: Union[str, None] = "c7d2b6f4a9e1"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # New contact_leads table: inbound enquiries captured before a member
    # account exists. Column order below defines the physical schema.
    op.create_table(
        "contact_leads",
        sa.Column("full_name", sa.String(length=255), nullable=False),
        sa.Column("email", sa.String(length=255), nullable=False),
        sa.Column("phone", sa.String(length=50), nullable=True),
        sa.Column("requested_services", sa.String(length=255), nullable=True),
        sa.Column("pet_name", sa.String(length=100), nullable=True),
        sa.Column("pet_breed", sa.String(length=100), nullable=True),
        sa.Column("suburb", sa.String(length=100), nullable=True),
        sa.Column("service_area_status", sa.String(length=32), nullable=True),
        sa.Column("message", sa.Text(), nullable=True),
        sa.Column("source", sa.String(length=50), nullable=False),
        sa.Column("status", sa.String(length=32), nullable=False, server_default="invite"),
        sa.Column("notes", sa.Text(), nullable=True),
        # NOTE(review): "metadata" is a reserved attribute name on SQLAlchemy
        # declarative models — the ORM model must map this column under a
        # different Python attribute. Confirm against the model definition.
        sa.Column("metadata", sa.JSON(), nullable=True),
        sa.Column("invited_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("invited_member_id", sa.Uuid(), nullable=True),
        sa.Column("id", sa.Uuid(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        # Keep the lead even if the member it spawned is later deleted.
        sa.ForeignKeyConstraint(["invited_member_id"], ["members.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_contact_leads_email"), "contact_leads", ["email"], unique=False)
    op.create_index(op.f("ix_contact_leads_invited_member_id"), "contact_leads", ["invited_member_id"], unique=False)
    op.create_index(op.f("ix_contact_leads_status"), "contact_leads", ["status"], unique=False)
def downgrade() -> None:
    """Tear down contact_leads: indexes first, then the table itself."""
    for column in ("status", "invited_member_id", "email"):
        op.drop_index(op.f(f"ix_contact_leads_{column}"), table_name="contact_leads")
    op.drop_table("contact_leads")
@@ -0,0 +1,39 @@
"""add experiment cookie name
Revision ID: f25d0f745a17
Revises: bd9f6a8b7c1d
Create Date: 2026-03-31 00:15:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = 'f25d0f745a17'
down_revision: Union[str, None] = 'bd9f6a8b7c1d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # Three-phase NOT NULL introduction: (1) add the column nullable,
    # (2) backfill every existing row, (3) tighten to NOT NULL + unique.
    # The order is load-bearing — do not reorder.
    op.add_column('experiments', sa.Column('cookie_name', sa.String(length=96), nullable=True))
    op.execute(
        """
        UPDATE experiments
        SET cookie_name =
            CASE experiment_key
                WHEN 'homepage_hero_test' THEN 'exp_homepage_hero'
                WHEN 'pricing_cta_test' THEN 'exp_pricing_cta'
                ELSE 'exp_' || experiment_key
            END
        """
    )
    op.alter_column('experiments', 'cookie_name', nullable=False)
    op.create_unique_constraint('uq_experiments_cookie_name', 'experiments', ['cookie_name'])
def downgrade() -> None:
    # Constraint must go before the column it covers.
    op.drop_constraint('uq_experiments_cookie_name', 'experiments', type_='unique')
    op.drop_column('experiments', 'cookie_name')
@@ -0,0 +1,38 @@
"""add global site control flags
Revision ID: f9c2d7a14b6e
Revises: 8b1a2c7d9e4f
Create Date: 2026-04-08 10:20:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "f9c2d7a14b6e"
down_revision: Union[str, None] = "8b1a2c7d9e4f"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add the three global kill-switch flags, all defaulting to enabled."""
    for flag_name in ("two_factor_enabled", "audit_history_enabled", "experiments_enabled"):
        op.add_column(
            "site_settings",
            sa.Column(flag_name, sa.Boolean(), nullable=False, server_default=sa.true()),
        )
def downgrade() -> None:
    """Drop the global flags in reverse creation order."""
    for flag_name in ("experiments_enabled", "audit_history_enabled", "two_factor_enabled"):
        op.drop_column("site_settings", flag_name)
+306
View File
@@ -0,0 +1,306 @@
"""
Goodwalk Flask CMS Backend
--------------------------
Content stored in SQLite (data/goodwalk.db).
Seeds from data/content.json on first run.
Admin API protected by HTTP Basic Auth (ADMIN_PASSWORD env var).
In production, also serves the static Svelte build from ../frontend/build.
"""
import hmac
import json
import logging
import os
import sqlite3
import traceback
from datetime import datetime, timezone
from functools import wraps
from pathlib import Path

from flask import Flask, jsonify, request, abort, send_from_directory, Response
from flask_cors import CORS
# NOTE(review): DEBUG level means every request/response line below is
# emitted — fine for a small self-hosted CMS, consider INFO in production.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%H:%M:%S',
)
log = logging.getLogger(__name__)
# static_folder=None: static files are served explicitly by the catch-all
# route at the bottom of this file, only when a frontend build exists.
app = Flask(__name__, static_folder=None)
# NOTE(review): PROPAGATE_EXCEPTIONS interacts with the Exception handler
# registered below — confirm errors still reach handle_exception in prod.
app.config['PROPAGATE_EXCEPTIONS'] = True
CORS(app)
@app.errorhandler(Exception)
def handle_exception(e):
    """Log unhandled errors and return a JSON 500.

    A handler registered for ``Exception`` also intercepts Werkzeug's
    ``HTTPException`` subclasses (raised by ``abort(404)`` etc.), which the
    original code flattened into generic 500s. Pass those through so they
    keep their intended status codes.
    """
    from werkzeug.exceptions import HTTPException
    if isinstance(e, HTTPException):
        return e
    log.error('Unhandled exception on %s %s\n%s', request.method, request.path, traceback.format_exc())
    return jsonify({'error': str(e)}), 500
# Data lives next to this file; the frontend build is a sibling directory.
DB_FILE = Path(__file__).parent / 'data' / 'goodwalk.db'
SEED_FILE = Path(__file__).parent / 'data' / 'content.json'
STATIC_DIR = Path(__file__).parent.parent / 'frontend' / 'build'
# WARNING: the fallback password is public in this repo — always set
# ADMIN_PASSWORD in the environment for any real deployment.
ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'goodwalk-admin')
# ── DB helpers ────────────────────────────────────────────────────────────────
def get_db():
    """Open a SQLite connection with dict-like row access."""
    connection = sqlite3.connect(DB_FILE)
    connection.row_factory = sqlite3.Row
    return connection
def init_db():
    """Create the schema if needed and seed content on first run.

    Safe to call repeatedly: table creation uses IF NOT EXISTS and seeding
    only happens while the content table is empty.
    """
    DB_FILE.parent.mkdir(exist_ok=True)
    log.info('DB file: %s', DB_FILE)
    conn = get_db()
    # One row per content section; data is a JSON blob.
    conn.execute('''
        CREATE TABLE IF NOT EXISTS content_sections (
            key TEXT PRIMARY KEY,
            data TEXT NOT NULL,
            updated_at TEXT NOT NULL
        )
    ''')
    conn.commit()
    count = conn.execute('SELECT COUNT(*) FROM content_sections').fetchone()[0]
    log.info('DB has %d section(s)', count)
    if count == 0 and SEED_FILE.exists():
        log.info('Seeding from %s', SEED_FILE)
        _seed_from_json(conn)
        log.info('Seed complete')
    elif count == 0:
        # Not fatal — the admin API can still create sections by hand.
        log.warning('No seed file found at %s — DB is empty', SEED_FILE)
    conn.close()
def _seed_from_json(conn):
    """Populate content_sections from SEED_FILE without clobbering existing rows."""
    with open(SEED_FILE, 'r', encoding='utf-8') as f:
        content = json.load(f)
    now = datetime.now(timezone.utc).isoformat()
    pages = content.get('pages', {})
    page_keys = (
        'home', 'packWalks', 'oneOnOneWalks', 'puppyVisits',
        'pricing', 'about', 'contact',
    )
    sections = {
        'siteSettings': content.get('siteSettings', {}),
        'navigation': content.get('navigation', {}),
        'footer': content.get('footer', {}),
        'testimonials': content.get('testimonials', []),
    }
    sections.update({f'pages.{name}': pages.get(name, {}) for name in page_keys})
    sections['onboarding'] = _default_onboarding()
    # INSERT OR IGNORE keeps any rows that somehow already exist.
    for key, data in sections.items():
        conn.execute(
            'INSERT OR IGNORE INTO content_sections (key, data, updated_at) VALUES (?, ?, ?)',
            (key, json.dumps(data, ensure_ascii=False), now)
        )
    conn.commit()
def _default_onboarding():
return {
'heading': 'Joining the Tiny Gang',
'intro': "Here's what to expect when you start with Goodwalk.",
'steps': [
{
'step': 1,
'title': 'Get in touch',
'body': "Fill out our contact form or send us an email. Tell us about your dog — breed, age, temperament — and we'll get back to you within 24 hours."
},
{
'step': 2,
'title': 'Free Meet & Greet',
'body': "We come to you for a no-obligation meet and greet. We'll meet your dog, answer your questions, and make sure we're the right fit for each other."
},
{
'step': 3,
'title': 'Assessment Walks',
'body': 'Your dog joins us for a minimum of two assessment walks. This lets us understand their personality, energy level, and compatibility with the current Gang.'
},
{
'step': 4,
'title': "Join the Gang!",
'body': "Once cleared, your dog becomes a permanent Tiny Gang member. We agree on walk days, set up invoicing, and you're good to go."
},
],
'requirements': [
'Current Auckland Council dog registration',
'Up-to-date vaccinations (C5 recommended)',
'Must pass two assessment walks',
'Dog must be sociable with other dogs',
],
}
def get_section(key):
    """Fetch one content section as parsed JSON, or None when the key is absent."""
    conn = get_db()
    row = conn.execute('SELECT data FROM content_sections WHERE key = ?', (key,)).fetchone()
    conn.close()
    if not row:
        return None
    return json.loads(row['data'])
# ── Auth ──────────────────────────────────────────────────────────────────────
def require_admin(f):
    """Decorator: enforce HTTP Basic auth against ADMIN_PASSWORD.

    Uses hmac.compare_digest so the password cannot be probed byte-by-byte
    via response timing, and tolerates requests where the Authorization
    header carries no password at all (auth.password is None).
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        supplied = auth.password if auth and auth.password else ''
        if not hmac.compare_digest(supplied, ADMIN_PASSWORD):
            return Response(
                'Authentication required', 401,
                {'WWW-Authenticate': 'Basic realm="Goodwalk Admin"'}
            )
        return f(*args, **kwargs)
    return decorated
# ── Request logging ──────────────────────────────────────────────────────────
@app.before_request
def log_request():
    # One DEBUG line per incoming request, before routing/handling.
    log.debug('%s %s', request.method, request.path)
@app.after_request
def log_response(response):
    # Mirror of log_request with the resolved status code appended.
    log.debug('%s %s %s', request.method, request.path, response.status_code)
    return response
# ── Public API ────────────────────────────────────────────────────────────────
# Each public endpoint returns the stored JSON for its section, with an
# empty fallback so the frontend never sees a 404/500 for missing content.
@app.route('/api/site-settings')
def site_settings():
    """Global site settings blob."""
    return jsonify(get_section('siteSettings') or {})
@app.route('/api/navigation')
def navigation():
    """Header navigation; defaults to an empty item list."""
    return jsonify(get_section('navigation') or {'items': []})
@app.route('/api/footer')
def footer():
    """Footer content."""
    return jsonify(get_section('footer') or {})
@app.route('/api/testimonials')
def testimonials():
    """Testimonial list (array, not object)."""
    return jsonify(get_section('testimonials') or [])
@app.route('/api/onboarding')
def onboarding():
    """Onboarding steps and requirements."""
    return jsonify(get_section('onboarding') or {})
@app.route('/api/pages/home')
def page_home():
    """Home page content."""
    return jsonify(get_section('pages.home') or {})
@app.route('/api/pages/pack-walks')
def page_pack_walks():
    """Pack-walks service page."""
    return jsonify(get_section('pages.packWalks') or {})
@app.route('/api/pages/1-1-walks')
def page_one_on_one():
    """One-on-one walks service page."""
    return jsonify(get_section('pages.oneOnOneWalks') or {})
@app.route('/api/pages/puppy-visits')
def page_puppy_visits():
    """Puppy-visits service page."""
    return jsonify(get_section('pages.puppyVisits') or {})
@app.route('/api/pages/pricing')
def page_pricing():
    """Pricing page."""
    return jsonify(get_section('pages.pricing') or {})
@app.route('/api/pages/about')
def page_about():
    """About page."""
    return jsonify(get_section('pages.about') or {})
@app.route('/api/pages/contact')
def page_contact():
    """Contact page copy (not the submission endpoint)."""
    return jsonify(get_section('pages.contact') or {})
# ── Contact form ──────────────────────────────────────────────────────────────
@app.route('/api/contact', methods=['POST'])
def contact_submit():
    """Accept a contact-form submission.

    Returns 400 for missing/non-object JSON bodies and 422 when a required
    field is absent, blank, or not a string. (The original called .strip()
    on body.get(field, ''), so a non-string JSON value — e.g. a number —
    raised AttributeError and surfaced as a 500; a non-dict JSON body such
    as a list crashed body.get the same way.)
    """
    body = request.get_json(force=True, silent=True)
    if not body or not isinstance(body, dict):
        return jsonify({'error': 'Invalid request'}), 400
    for field in ['name', 'email', 'message']:
        value = body.get(field)
        if not isinstance(value, str) or not value.strip():
            return jsonify({'error': f'{field} is required'}), 422
    print(f"[CONTACT] {body.get('name')} <{body.get('email')}> — {body.get('message', '')[:80]}")
    return jsonify({'success': True})
# ── Admin API ─────────────────────────────────────────────────────────────────
@app.route('/api/admin/sections')
@require_admin
def admin_list_sections():
    """List every section key with its last-modified timestamp."""
    conn = get_db()
    rows = conn.execute(
        'SELECT key, updated_at FROM content_sections ORDER BY key'
    ).fetchall()
    conn.close()
    listing = []
    for row in rows:
        listing.append({'key': row['key'], 'updated_at': row['updated_at']})
    return jsonify(listing)
@app.route('/api/admin/sections/<path:key>')
@require_admin
def admin_get_section(key):
    """Return one section (key, parsed data, timestamp); 404 when unknown."""
    conn = get_db()
    row = conn.execute(
        'SELECT key, data, updated_at FROM content_sections WHERE key = ?', (key,)
    ).fetchone()
    conn.close()
    if row is None:
        abort(404)
    payload = {
        'key': row['key'],
        'data': json.loads(row['data']),
        'updated_at': row['updated_at'],
    }
    return jsonify(payload)
@app.route('/api/admin/sections/<path:key>', methods=['PUT'])
@require_admin
def admin_update_section(key):
    """Create or replace a section's JSON payload (upsert by key)."""
    body = request.get_json(force=True, silent=True)
    if body is None:
        return jsonify({'error': 'Invalid JSON'}), 400
    now = datetime.now(timezone.utc).isoformat()
    conn = get_db()
    # SQLite UPSERT: new keys are inserted, existing keys are overwritten.
    conn.execute(
        '''INSERT INTO content_sections (key, data, updated_at) VALUES (?, ?, ?)
        ON CONFLICT(key) DO UPDATE SET data = excluded.data, updated_at = excluded.updated_at''',
        (key, json.dumps(body, ensure_ascii=False), now)
    )
    conn.commit()
    conn.close()
    return jsonify({'success': True, 'key': key, 'updated_at': now})
# ── Health ────────────────────────────────────────────────────────────────────
@app.route('/api/health')
def health():
    """Liveness probe — no DB access, always 200."""
    return jsonify({'status': 'ok'})
# ── Static frontend (production) ──────────────────────────────────────────────
# Only register the catch-all static routes when a production build exists;
# in development the frontend dev server serves these paths instead.
if STATIC_DIR.exists():
    @app.route('/', defaults={'path': ''})
    @app.route('/<path:path>')
    def serve_static(path):
        # Serve real files directly; anything else falls through to the SPA
        # entry point so client-side routing can resolve the path.
        file_path = STATIC_DIR / path
        if path and file_path.is_file():
            return send_from_directory(STATIC_DIR, path)
        return send_from_directory(STATIC_DIR, 'index.html')
if __name__ == '__main__':
    # Dev entry point: create/seed the DB, then run Flask's debug server.
    init_db()
    app.run(debug=True, port=5000)
View File
View File
+58
View File
@@ -0,0 +1,58 @@
"""
FastAPI dependency for extracting and validating the current authenticated user.
"""
import uuid
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jose import JWTError
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.jwt import verify_access_token
from app.database import get_db
from app.models.user import User
bearer_scheme = HTTPBearer()
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
    db: AsyncSession = Depends(get_db),
) -> User:
    """
    Resolve the Bearer token into a live User row.

    Raises:
        401 HTTPException if token is missing, invalid, or expired.
        401 HTTPException if the user no longer exists or is inactive.
    """
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        claims = verify_access_token(credentials.credentials)
        subject = claims.get("sub")
        if subject is None:
            raise unauthorized
        user_key = uuid.UUID(subject)
    except (JWTError, ValueError):
        raise unauthorized
    lookup = await db.execute(select(User).where(User.id == user_key))
    user = lookup.scalars().first()
    if user is None:
        raise unauthorized
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Inactive user account",
        )
    return user
+84
View File
@@ -0,0 +1,84 @@
"""
Explicit JWT creation and verification.
No ORM magic — all logic is auditable here.
"""
import hashlib
import secrets
import uuid
from datetime import datetime, timedelta, timezone
from typing import Optional
from jose import JWTError, jwt
from app.config import settings
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """
    Create a signed JWT access token.

    Args:
        data: Payload data to encode (must include 'sub' key).
        expires_delta: Token lifetime. Defaults to ACCESS_TOKEN_EXPIRE_MINUTES.

    Returns:
        Encoded JWT string.
    """
    # Capture "now" once so iat and exp derive from the same instant; the
    # original called datetime.now() twice and the two claims could differ
    # by a few microseconds.
    now = datetime.now(timezone.utc)
    if expires_delta is not None:
        expire = now + expires_delta
    else:
        expire = now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode = data.copy()
    to_encode.update({"exp": expire, "iat": now})
    return jwt.encode(
        to_encode,
        settings.SECRET_KEY,
        algorithm=settings.ALGORITHM,
    )
def verify_access_token(token: str) -> dict:
    """
    Verify and decode a JWT access token.

    Args:
        token: Encoded JWT string.

    Returns:
        Decoded payload dict.

    Raises:
        JWTError: If the token is invalid or expired.
    """
    return jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
def hash_refresh_token(plaintext: str) -> str:
    """SHA-256 hash a refresh token for storage. Fast is fine — it's already a random secret."""
    digest = hashlib.sha256(plaintext.encode("utf-8"))
    return digest.hexdigest()
def create_refresh_token() -> tuple[str, str]:
    """
    Generate a cryptographically secure refresh token.

    Returns:
        Tuple of (plaintext_token, hashed_token).
        Store only the hash; send the plaintext to the client.
    """
    token = secrets.token_urlsafe(64)
    digest = hash_refresh_token(token)
    return token, digest
def get_token_expiry(days: Optional[int] = None) -> datetime:
    """Return a UTC datetime for token expiry (defaults to REFRESH_TOKEN_EXPIRE_DAYS)."""
    if days is None:
        days = settings.REFRESH_TOKEN_EXPIRE_DAYS
    return datetime.now(timezone.utc) + timedelta(days=days)
+71
View File
@@ -0,0 +1,71 @@
"""
FastAPI dependency helpers for authenticated member access.
Member tokens carry role='member' in the JWT payload.
"""
import uuid
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from jose import JWTError
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.jwt import verify_access_token
from app.database import get_db
from app.models.member import Member
bearer_scheme = HTTPBearer()
async def _get_member_from_token(
    credentials: HTTPAuthorizationCredentials,
    db: AsyncSession,
) -> Member:
    """Decode a member-role bearer token and load the matching Member row."""
    unauthorized = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        claims = verify_access_token(credentials.credentials)
        # Admin/user tokens must never grant member access.
        if claims.get("role") != "member":
            raise unauthorized
        subject = claims.get("sub")
        if subject is None:
            raise unauthorized
        member_key = uuid.UUID(subject)
    except (JWTError, ValueError):
        raise unauthorized
    lookup = await db.execute(select(Member).where(Member.id == member_key))
    member = lookup.scalars().first()
    if member is None:
        raise unauthorized
    if not member.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Inactive member account",
        )
    return member
async def get_authenticated_member(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
    db: AsyncSession = Depends(get_db),
) -> Member:
    """Dependency: any valid, active member — regardless of onboarding status."""
    return await _get_member_from_token(credentials, db)
async def get_current_member(
    member: Member = Depends(get_authenticated_member),
) -> Member:
    """Dependency: a fully onboarded ('active') member; 403 otherwise."""
    if member.member_status == "active":
        return member
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Member onboarding is not complete.",
    )
+21
View File
@@ -0,0 +1,21 @@
"""
Password hashing and verification using bcrypt directly.
"""
import bcrypt
def hash_password(password: str) -> str:
    """Hash a plaintext password using bcrypt."""
    salt = bcrypt.gensalt()
    digest = bcrypt.hashpw(password.encode("utf-8"), salt)
    return digest.decode("utf-8")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Verify a plaintext password against a bcrypt hash."""
    candidate = plain_password.encode("utf-8")
    stored = hashed_password.encode("utf-8")
    try:
        return bcrypt.checkpw(candidate, stored)
    except ValueError:
        # bcrypt 4.x raises for oversized inputs; treat them as invalid credentials.
        return False
+39
View File
@@ -0,0 +1,39 @@
from pydantic_settings import BaseSettings
from pydantic import field_validator
from typing import List
class Settings(BaseSettings):
    """Application configuration, loaded from environment variables / .env."""

    # WARNING: both defaults below are development-only — override in production.
    DATABASE_URL: str = "postgresql+asyncpg://postgres:postgres@localhost:5432/goodwalk"
    SECRET_KEY: str = "change-me-to-a-long-random-secret"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 15
    REFRESH_TOKEN_EXPIRE_DAYS: int = 7
    ALGORITHM: str = "HS256"
    # Comma-separated; parsed into a list by allowed_origins_list below.
    ALLOWED_ORIGINS: str = "http://localhost:5173,https://www.goodwalk.co.nz"
    # OpenAPI docs are off by default; flip on for local development.
    ENABLE_DOCS: bool = False
    SITE_URL: str = "http://localhost:5173"
    MEMBERS_URL: str = "http://localhost:5173/members"
    # Email — set EMAIL_BACKEND=console (default) to print to stdout during dev
    EMAIL_BACKEND: str = "console"
    SMTP_HOST: str = ""
    SMTP_PORT: int = 587
    SMTP_USE_TLS: bool = True
    SMTP_USER: str = ""
    SMTP_PASSWORD: str = ""
    EMAIL_FROM: str = "noreply@goodwalk.co.nz"
    @field_validator("ALLOWED_ORIGINS", mode="before")
    @classmethod
    def parse_allowed_origins(cls, v: str) -> str:
        # Keep as string; we parse to list via property
        # (deliberate pass-through — the field stays a plain str).
        return v
    @property
    def allowed_origins_list(self) -> List[str]:
        # Splits on commas, trimming whitespace and dropping empty entries.
        return [origin.strip() for origin in self.ALLOWED_ORIGINS.split(",") if origin.strip()]
    model_config = {"env_file": ".env", "extra": "ignore"}
# Module-level singleton imported throughout the app.
settings = Settings()
+29
View File
@@ -0,0 +1,29 @@
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from typing import AsyncGenerator
from app.config import settings
# Single process-wide async engine; pool_pre_ping transparently replaces
# connections the database has silently dropped.
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=False,
    pool_pre_ping=True,
)
# expire_on_commit=False keeps ORM objects readable after the request-scoped
# commit, so response serialization doesn't trigger lazy refreshes.
AsyncSessionLocal = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autoflush=False,
    autocommit=False,
)
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """
    FastAPI dependency yielding a request-scoped session.

    Commits when the request handler finishes cleanly; rolls back and
    re-raises on any exception. The ``async with`` block already closes the
    session on exit, so the original's ``finally: await session.close()``
    was redundant and has been removed.
    """
    async with AsyncSessionLocal() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
+3
View File
@@ -0,0 +1,3 @@
from app.experiments.registry import EXPERIMENT_REGISTRY
__all__ = ["EXPERIMENT_REGISTRY"]
+46
View File
@@ -0,0 +1,46 @@
# Static registry of A/B experiments: cookie names, eligible routes, and
# variant allocations. Allocations within one experiment are percentages
# and should sum to 100.
EXPERIMENT_REGISTRY = {
    "homepage_hero_test": {
        "experiment_key": "homepage_hero_test",
        "cookie_name": "exp_homepage_hero",
        "name": "Homepage hero test",
        "description": "Homepage hero messaging and CTA emphasis.",
        "enabled": True,
        "eligible_routes": ["/"],
        "variants": [
            {
                "variant_key": "control",
                "label": "Current hero copy",
                "allocation": 50,
                "is_control": True,
            },
            {
                "variant_key": "tiny_gang_social_proof",
                "label": "Tiny Gang social proof",
                "allocation": 50,
                "is_control": False,
            },
        ],
    },
    "pricing_cta_test": {
        "experiment_key": "pricing_cta_test",
        "cookie_name": "exp_pricing_cta",
        "name": "Pricing CTA test",
        "description": "Pricing CTA emphasis without changing page meaning.",
        "enabled": True,
        "eligible_routes": ["/our-pricing"],
        "variants": [
            {
                "variant_key": "control",
                "label": "Book now CTA",
                "allocation": 50,
                "is_control": True,
            },
            {
                "variant_key": "meet_greet_emphasis",
                "label": "Meet and greet emphasis",
                "allocation": 50,
                "is_control": False,
            },
        ],
    },
}
+145
View File
@@ -0,0 +1,145 @@
import asyncio
import traceback
import uuid
from contextlib import asynccontextmanager, suppress
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from jose import JWTError
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from sqlalchemy import select
from starlette.middleware.base import BaseHTTPMiddleware
from app.config import settings
from app.database import AsyncSessionLocal, engine
from app.services.experiments import sync_experiment_registry
from app.services.notifications import notification_automation_loop
from app.middleware.rate_limit import limiter
from app.middleware.logging import RequestLogMiddleware
from app.routers import auth, pages, posts, settings as settings_router, sections
from app.routers import analytics as analytics_router
from app.routers import audit as audit_router
from app.routers import contact as contact_router
from app.routers import experiments as experiments_router
from app.routers import members as members_router
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application startup/shutdown hook.

    Startup: sync the static experiment registry into the DB, then launch
    the background notification loop. Shutdown: cancel the loop, wait for
    it to unwind, then dispose of the connection pool.
    """
    async with AsyncSessionLocal() as session:
        await sync_experiment_registry(session)
        await session.commit()
    notification_task = asyncio.create_task(notification_automation_loop())
    try:
        yield
    finally:
        # Cancellation is expected here — suppress it so shutdown stays clean.
        notification_task.cancel()
        with suppress(asyncio.CancelledError):
            await notification_task
        await engine.dispose()
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Apply baseline browser-facing hardening headers to every response."""

    # Header name -> value, set verbatim on each outgoing response.
    _HEADERS = {
        "X-Content-Type-Options": "nosniff",
        "X-Frame-Options": "DENY",
        "Content-Security-Policy": (
            "default-src 'none'; frame-ancestors 'none'; base-uri 'none'; form-action 'none'"
        ),
        "Strict-Transport-Security": (
            "max-age=31536000; includeSubDomains"
        ),
        "Referrer-Policy": "strict-origin-when-cross-origin",
    }

    async def dispatch(self, request: Request, call_next):
        response = await call_next(request)
        for header_name, header_value in self._HEADERS.items():
            response.headers[header_name] = header_value
        return response
app = FastAPI(
    title="Goodwalk CMS API",
    version="1.0.0",
    description="CMS API for the Goodwalk marketing site",
    lifespan=lifespan,
    # OpenAPI docs are hidden entirely unless explicitly enabled in settings.
    docs_url="/docs" if settings.ENABLE_DOCS else None,
    redoc_url="/redoc" if settings.ENABLE_DOCS else None,
)
# Rate limiter
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
# CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.allowed_origins_list,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Middleware added later wraps earlier ones — per request the order is
# RequestLog -> SecurityHeaders -> CORS -> route handler.
app.add_middleware(SecurityHeadersMiddleware)
app.add_middleware(RequestLogMiddleware)
# Routers — all under /api/v1/
API_PREFIX = "/api/v1"
app.include_router(pages.router, prefix=API_PREFIX)
app.include_router(posts.router, prefix=API_PREFIX)
app.include_router(settings_router.router, prefix=API_PREFIX)
app.include_router(auth.router, prefix=API_PREFIX)
# Legacy-compatible section endpoints (no /api/v1 prefix — paths match existing frontend)
app.include_router(sections.router)
# Analytics — ingest endpoint is public (/api/analytics/event), summary is authed (/api/v1/analytics/summary)
app.include_router(analytics_router.router)
app.include_router(contact_router.router)
app.include_router(experiments_router.router)
app.include_router(members_router.router, prefix=API_PREFIX)
app.include_router(audit_router.router, prefix=API_PREFIX)
@app.exception_handler(Exception)
async def unhandled_exception_handler(request: Request, exc: Exception):
    """Catch unhandled exceptions, audit-log them for member requests, return 500.

    The audit write is strictly best-effort: an invalid token, a missing
    member, or a failing DB write must never change the client-facing
    response below.
    """
    auth_header = request.headers.get("Authorization", "")
    if auth_header.startswith("Bearer "):
        token = auth_header[7:]
        try:
            # Local imports keep this cold path off the module import graph.
            from app.auth.jwt import verify_access_token
            from app.models.member import Member
            from app.services.audit import log_audit
            payload = verify_access_token(token)
            if payload.get("role") == "member":
                member_uuid = uuid.UUID(payload["sub"])
                async with AsyncSessionLocal() as session:
                    result = await session.execute(
                        select(Member).where(Member.id == member_uuid)
                    )
                    member = result.scalars().first()
                    await log_audit(
                        session,
                        member_id=member_uuid,
                        member_email=member.email if member else None,
                        action_type="error",
                        area=str(request.url.path),
                        description=f"Unhandled error: {type(exc).__name__}",
                        status="error",
                        error_message=str(exc)[:500],
                        error_detail=traceback.format_exc()[:4000],
                        ip_address=request.client.host if request.client else None,
                        user_agent=request.headers.get("User-Agent"),
                    )
                    await session.commit()
        except Exception:
            # Was `except (JWTError, ValueError, Exception)` — the tuple was
            # redundant since Exception already covers the other two.
            pass  # Never let audit logging suppress the original error response
    return JSONResponse(status_code=500, content={"error": "Internal server error"})
@app.get("/health", tags=["Health"])
async def health_check():
    """Liveness probe — no dependencies, always 200."""
    return {"status": "ok"}
View File
+293
View File
@@ -0,0 +1,293 @@
"""
Request logging middleware.
Prints a clean, colour-coded line for every meaningful HTTP request.
Context-aware: pulls the email, member status, service type, etc. from the
request body for the most important endpoints so you can read the log without
needing to replay the request.
Format
------
METHOD /path/to/endpoint STATUS timing origin
↳ human-readable context (when relevant)
Localhost / loopback addresses are rendered as local:PORT rather than the
raw IP.
"""
from __future__ import annotations
import io
import json
import sys
import time
from typing import Optional
from rich.console import Console
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
# Force UTF-8 on Windows so arrow / symbol characters render correctly in any
# terminal (Windows Terminal, VS Code, PowerShell). On other platforms the
# default encoding is already UTF-8.
def _make_console() -> Console:
    """Build the shared Rich console, forcing UTF-8 output on Windows.

    Windows terminals may default to a legacy code page; wrapping stdout's
    byte buffer in a UTF-8 TextIOWrapper keeps arrow/symbol glyphs intact.
    """
    on_windows_with_buffer = sys.platform == "win32" and hasattr(sys.stdout, "buffer")
    if on_windows_with_buffer:
        utf8_stdout = io.TextIOWrapper(
            sys.stdout.buffer, encoding="utf-8", line_buffering=True
        )
        return Console(highlight=False, markup=True, file=utf8_stdout)
    return Console(highlight=False, markup=True)
# Single shared console instance used by the middleware below.
_console = _make_console()
# ── Paths that are too noisy to log ───────────────────────────────────────────
_SKIP = frozenset({"/health", "/favicon.ico", "/robots.txt"})
# Methods whose request body is read and parsed for context extraction.
_BODY_METHODS = frozenset({"POST", "PUT", "PATCH"})
# ── Colour maps ───────────────────────────────────────────────────────────────
# Rich style string per HTTP method; unknown methods fall back to "white".
_METHOD_STYLE: dict[str, str] = {
    "GET": "bold #6ea8fe", # soft blue
    "POST": "bold #75b798", # soft green
    "PUT": "bold #e6a817", # amber
    "PATCH": "bold #c586c0", # lavender
    "DELETE": "bold #f28b82", # soft red
    "HEAD": "dim",
    "OPTIONS": "dim",
}
def _status_style(code: int) -> str:
    """Map an HTTP status code to a Rich colour style (2xx green … 5xx red)."""
    thresholds = (
        (300, "bold green"),
        (400, "bold cyan"),
        (500, "bold yellow"),
    )
    for upper_bound, style in thresholds:
        if code < upper_bound:
            return style
    return "bold red"
def _timing_style(ms: float) -> str:
    """Colour a latency figure: <200 ms plain, <1 s yellow, otherwise red."""
    if ms >= 1_000:
        return "bold red"
    return "white" if ms < 200 else "yellow"
# ── Helpers ───────────────────────────────────────────────────────────────────
_LOCAL_HOSTS = {"127.0.0.1", "::1", "0.0.0.0", "localhost", "::ffff:127.0.0.1"}
def _origin(request: Request) -> str:
    """Render the request origin: ``local:PORT`` for loopback, dimmed IP otherwise."""
    client = request.client
    client_host = client.host if client else "unknown"
    if client_host in _LOCAL_HOSTS:
        listen_port = request.url.port or 8000
        return f"[dim]local:{listen_port}[/dim]"
    return f"[dim]{client_host}[/dim]"
def _body(raw: bytes) -> Optional[dict]:
    """Parse a JSON request body; return the dict, or None for anything else."""
    if not raw:
        return None
    try:
        parsed = json.loads(raw)
    except (json.JSONDecodeError, ValueError):
        return None
    # Non-object payloads (lists, scalars) carry no usable key/value context.
    return parsed if isinstance(parsed, dict) else None
def _shorten(path: str, width: int = 58) -> str:
    """Return *path* left-aligned in a field of *width* characters.

    Paths longer than *width* are truncated to ``width - 1`` characters with a
    trailing ellipsis so log columns stay aligned; shorter paths are padded on
    the right with spaces.
    """
    if len(path) > width:
        # BUG FIX: the truncation previously appended an empty string instead
        # of the ellipsis promised by the docstring, silently dropping a char.
        path = path[: width - 1] + "…"
    return f"{path:<{width}}"
# ── Context extraction ────────────────────────────────────────────────────────
# Each branch returns a *markup* string (may contain [colour] tags) or None.
def _context(path: str, method: str, data: Optional[dict], status: int) -> Optional[str]: # noqa: C901
    """Derive a one-line, human-readable context string for the request log.

    Branches are ordered most-specific-first and the first matching
    path/method rule wins, so the ordering below is significant. Returns Rich
    markup to print under the request line, or None when there is nothing
    useful to add. `data` is the parsed JSON request body (may be None).
    """
    d = data or {}
    em = d.get("email", "")
    # ── Admin auth ────────────────────────────────────────────────────────────
    if path.endswith("/auth/login") and "/members/" not in path:
        if status < 400:
            return f"[dim]admin[/dim] · {em}"
        return f"[red]✗[/red] bad credentials · {em}"
    # ── Member auth ───────────────────────────────────────────────────────────
    if "/members/auth/login/verify" in path:
        if status < 400:
            return f"[dim]member 2FA[/dim] · {em} · [green]verified ✓[/green]"
        return f"[red]✗[/red] bad 2FA code · {em}"
    if "/members/auth/login" in path and path.endswith("/login"):
        if status < 400:
            return f"[dim]member login[/dim] · {em}"
        return f"[red]✗[/red] bad password · {em}"
    if "/members/auth/refresh" in path:
        return None # token rotation — no useful body detail
    # ── Claim flow ────────────────────────────────────────────────────────────
    if "/members/claim/request" in path:
        return f"[dim]claim request[/dim] · {em}"
    if "/members/claim/complete" in path:
        if status < 400:
            return f"[green]account claimed[/green] · {em}"
        return f"[red]✗[/red] claim failed · {em}"
    # ── Member: profile ───────────────────────────────────────────────────────
    if method == "PUT" and path.endswith("/members/me"):
        # Only the field names are logged, never the submitted values.
        fields = [k for k in d]
        if fields:
            return "[dim]updated[/dim] · " + ", ".join(fields)
        return None
    # ── Member: onboarding ────────────────────────────────────────────────────
    if "/members/onboarding/contract" in path:
        signer = d.get("signer_name", "")
        if status < 400:
            return f"[green]contract signed[/green] · {signer}"
        return None
    if method == "PUT" and "/members/onboarding" in path:
        if d.get("complete_onboarding"):
            return "[dim]onboarding complete[/dim] → [yellow]pending_contract[/yellow]"
        return None
    # ── Member: bookings ──────────────────────────────────────────────────────
    if method == "POST" and path.endswith("/members/bookings"):
        svc = d.get("service_type", "")
        notes = d.get("notes", "")
        label = _service_label(svc)
        parts = [label] + ([notes[:50]] if notes else [])
        return "[dim]booking request[/dim] · " + " · ".join(p for p in parts if p)
    # ── Admin: create member ──────────────────────────────────────────────────
    if (
        method == "POST"
        and "/admin/members" in path
        and not any(seg in path for seg in ("/activate", "/walks", "/bookings", "/messages"))
    ):
        first = d.get("first_name", "")
        last = d.get("last_name", "")
        em2 = d.get("email", "")
        name = f"{first} {last}".strip()
        parts = [n for n in (name, em2) if n]
        return "[dim]new member[/dim] · " + " · ".join(parts)
    if method == "POST" and "/admin/members/" in path and path.endswith("/activate"):
        return "status → [green]active ✓[/green]"
    if method == "PUT" and "/admin/members/" in path:
        s = d.get("member_status")
        if s:
            return f"status → [cyan]{s}[/cyan]"
        return None
    # ── Admin: walks ──────────────────────────────────────────────────────────
    if method == "POST" and "/admin/walks" in path:
        svc = _service_label(d.get("service_type", ""))
        dur = d.get("duration_minutes", "")
        parts = [svc] + ([f"{dur} min"] if dur else [])
        return "[dim]walk recorded[/dim] · " + " · ".join(p for p in parts if p)
    # ── Admin: messages ───────────────────────────────────────────────────────
    if method == "POST" and "/admin/messages" in path:
        subject = d.get("subject", "")
        return f"[dim]message sent[/dim] · {subject}" if subject else "[dim]message sent[/dim]"
    # ── Admin: bookings ───────────────────────────────────────────────────────
    if method == "PUT" and "/admin/bookings/" in path:
        s = d.get("status")
        if s:
            _colour = {"confirmed": "green", "cancelled": "red", "completed": "cyan"}.get(s, "yellow")
            return f"status → [{_colour}]{s}[/{_colour}]"
        if d.get("admin_notes"):
            return "[dim]admin notes updated[/dim]"
        return None
    # ── Admin: notifications ──────────────────────────────────────────────────
    if method == "POST" and "/admin/notifications/run" in path:
        return "[dim]notification run triggered[/dim]"
    if method == "PUT" and "/admin/notifications/settings" in path:
        keys = list(d.keys())
        # Conditional binds looser than "+": returns the joined string, or
        # None when the body carried no keys at all.
        return "[dim]settings updated[/dim] · " + ", ".join(keys) if keys else None
    # ── Contact leads ─────────────────────────────────────────────────────────
    if "/contact" in path and method == "POST":
        name = d.get("full_name") or d.get("name", "")
        pet = d.get("pet_name", "")
        email_fallback = d.get("email", "")
        parts = [name or email_fallback] + ([f"dog: {pet}"] if pet else [])
        return "[dim]lead[/dim] · " + " · ".join(p for p in parts if p)
    # ── Generic 4xx hints ─────────────────────────────────────────────────────
    if status == 401:
        return "[red]✗[/red] unauthorized"
    if status == 403:
        return "[red]✗[/red] forbidden"
    if status == 422:
        return "[yellow]⚠[/yellow] validation error"
    if status == 429:
        return "[yellow]⚠[/yellow] rate limited"
    return None
def _service_label(svc: str) -> str:
    """Translate a service-type key into its display label; unknown keys pass through."""
    display_names = {
        "pack_walk": "Pack Walk",
        "1_1_walk": "1-1 Walk",
        "puppy_visit": "Puppy Visit",
    }
    return display_names.get(svc, svc)
# ── Middleware ────────────────────────────────────────────────────────────────
class RequestLogMiddleware(BaseHTTPMiddleware):
    """
    Logs every non-trivial HTTP request to the console via Rich.
    Body reads are cached by Starlette's Request.body() so downstream
    handlers always see the full body unchanged.
    """
    async def dispatch(self, request: Request, call_next):
        path = request.url.path
        method = request.method
        # Skip noise endpoints and CORS preflight requests entirely.
        if path in _SKIP or method == "OPTIONS":
            return await call_next(request)
        # Read and cache body before handing to the route handler.
        raw = b""
        if method in _BODY_METHODS:
            raw = await request.body() # Starlette caches in request._body
        data = _body(raw)
        # Time only the downstream handler, not the body read or our printing.
        t0 = time.perf_counter()
        response = await call_next(request)
        elapsed = (time.perf_counter() - t0) * 1_000
        status = response.status_code
        ctx = _context(path, method, data, status)
        origin = _origin(request)
        # Fixed-width columns keep consecutive log lines visually aligned.
        method_w = f"{method:<7}"
        timing = f"{elapsed:>7.1f}ms"
        path_w = _shorten(path)
        method_styled = f"[{_METHOD_STYLE.get(method, 'white')}]{method_w}[/]"
        status_styled = f"[{_status_style(status)}]{status}[/]"
        timing_styled = f"[{_timing_style(elapsed)}]{timing}[/]"
        _console.print(
            f" {method_styled} {path_w} {status_styled} {timing_styled} {origin}"
        )
        # Secondary indented line with endpoint-specific context, if any.
        if ctx:
            _console.print(f" [dim]↳[/dim] {ctx}")
        return response
+8
View File
@@ -0,0 +1,8 @@
"""
slowapi rate limiter setup.
Import `limiter` here and attach it to the FastAPI app in main.py.
"""
from slowapi import Limiter
from slowapi.util import get_remote_address
# Rate-limit keyed on the client's remote address; headers_enabled=True adds
# the X-RateLimit-* response headers so clients can see their quota.
limiter = Limiter(key_func=get_remote_address, headers_enabled=True)
+44
View File
@@ -0,0 +1,44 @@
from app.models.base import Base
from app.models.page import Page
from app.models.post import BlogPost
from app.models.settings import SiteSettings
from app.models.user import User, RefreshToken
from app.models.section import ContentSection
from app.models.analytics import AnalyticsEvent
from app.models.experiment import Experiment, ExperimentVariant, ExperimentEvent
from app.models.member import (
Member,
MemberVerificationCode,
MemberRefreshToken,
MagicLinkToken,
Walk,
Booking,
AdminMessage,
MemberNotificationDispatch,
)
from app.models.contact_lead import ContactLead
from app.models.audit import AuditLog
# Public re-export surface of app.models. Importing this package pulls in
# every model module, registering all tables on Base.metadata (which is what
# Alembic autogenerate relies on).
__all__ = [
    "Base",
    "Page",
    "BlogPost",
    "SiteSettings",
    "User",
    "RefreshToken",
    "ContentSection",
    "AnalyticsEvent",
    "Experiment",
    "ExperimentVariant",
    "ExperimentEvent",
    "Member",
    "MemberVerificationCode",
    "MemberRefreshToken",
    "MagicLinkToken",
    "Walk",
    "Booking",
    "AdminMessage",
    "MemberNotificationDispatch",
    "ContactLead",
    "AuditLog",
]
+29
View File
@@ -0,0 +1,29 @@
from datetime import datetime
from sqlalchemy import String, DateTime, func, JSON
from sqlalchemy.orm import Mapped, mapped_column
from app.models.base import Base, UUIDMixin
class AnalyticsEvent(Base, UUIDMixin):
    """A single client-side analytics event (page view, click, …).

    Append-only: rows carry a server-defaulted created_at and no updated_at.
    Only reduced IP forms (ip_hash, ip_partial) are stored here — the raw
    address is apparently never persisted; confirm in the recording service.
    """
    __tablename__ = "analytics_events"
    event_type: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    page: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    element: Mapped[str | None] = mapped_column(String(255), nullable=True)
    # Attribute is metadata_ because "metadata" is reserved on Declarative
    # classes; the underlying DB column is still named "metadata".
    metadata_: Mapped[dict | None] = mapped_column("metadata", JSON, nullable=True)
    session_id: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    ip_hash: Mapped[str | None] = mapped_column(String(64), nullable=True)
    ip_partial: Mapped[str | None] = mapped_column(String(24), nullable=True)
    user_agent: Mapped[str | None] = mapped_column(String(512), nullable=True)
    browser: Mapped[str | None] = mapped_column(String(100), nullable=True)
    os_name: Mapped[str | None] = mapped_column(String(100), nullable=True)
    country: Mapped[str | None] = mapped_column(String(100), nullable=True)
    city: Mapped[str | None] = mapped_column(String(100), nullable=True)
    # Stamped by the database, not the application.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
    )
+69
View File
@@ -0,0 +1,69 @@
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import DateTime, ForeignKey, Index, String, Text, JSON, func
from sqlalchemy import Uuid
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, UUIDMixin
class AuditLog(Base, UUIDMixin):
    """Immutable record of member activity and application errors.

    Rows outlive the entities they reference: member/booking FKs are SET NULL
    on delete and member_email is denormalised so the trail stays readable.
    """
    __tablename__ = "audit_logs"
    __table_args__ = (
        Index("ix_audit_logs_timestamp", "timestamp"),
        Index("ix_audit_logs_member_id", "member_id"),
        Index("ix_audit_logs_action_type", "action_type"),
        Index("ix_audit_logs_status", "status"),
    )
    # Stamped by the database at insert time.
    timestamp: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    # Nullable FK — SET NULL if member is deleted so the log is preserved.
    member_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="SET NULL"),
        nullable=True,
    )
    # Denormalised for readability after member deletion.
    member_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    # One of: login, logout, page_visit, booking_created, booking_cancelled,
    # profile_updated, onboarding_updated, contract_signed,
    # account_claimed, message_read, error
    action_type: Mapped[str] = mapped_column(String(64), nullable=False)
    # Identifies the page / feature area, e.g. "members/dashboard"
    area: Mapped[str] = mapped_column(String(255), nullable=False)
    # Human-readable one-liner
    description: Mapped[str] = mapped_column(String(500), nullable=False)
    # success | warning | error
    status: Mapped[str] = mapped_column(String(16), nullable=False, default="success")
    # Optional related booking — SET NULL if booking is deleted.
    booking_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("bookings.id", ondelete="SET NULL"),
        nullable=True,
    )
    # Error detail — populated for action_type='error' records.
    error_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    error_detail: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Request metadata
    ip_address: Mapped[Optional[str]] = mapped_column(String(64), nullable=True)
    user_agent: Mapped[Optional[str]] = mapped_column(String(512), nullable=True)
    # Catch-all JSON for any extra context (e.g. booking service_type)
    extra: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)
    # Convenience join to the member row; None once the member is deleted.
    member: Mapped[Optional["Member"]] = relationship( # type: ignore[name-defined]
        "Member", foreign_keys=[member_id]
    )
+30
View File
@@ -0,0 +1,30 @@
import uuid
from datetime import datetime, timezone
from sqlalchemy import DateTime, func, Uuid
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class Base(DeclarativeBase):
    """Declarative base shared by every ORM model in the application."""
    pass
class TimestampMixin:
    """Adds DB-maintained created_at / updated_at timestamp columns."""
    # Stamped once by the database on insert.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    # Starts equal to created_at; refreshed on every ORM-issued UPDATE via
    # onupdate (note: raw SQL updates bypass the onupdate hook).
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
class UUIDMixin:
    """Adds a UUID primary key, generated client-side with uuid.uuid4."""
    id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
    )
+34
View File
@@ -0,0 +1,34 @@
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import DateTime, ForeignKey, JSON, String, Text, func
from sqlalchemy import Uuid
from sqlalchemy.orm import Mapped, mapped_column
from app.models.base import Base, UUIDMixin, TimestampMixin
class ContactLead(Base, UUIDMixin, TimestampMixin):
    """A prospective-client enquiry, typically captured from the contact form.

    When a lead is converted, invited_at / invited_member_id link it to the
    created Member row (SET NULL if that member is later deleted).
    """
    __tablename__ = "contact_leads"
    full_name: Mapped[str] = mapped_column(String(255), nullable=False)
    email: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    phone: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    requested_services: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    pet_name: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    pet_breed: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    suburb: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    service_area_status: Mapped[Optional[str]] = mapped_column(String(32), nullable=True)
    message: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Where the lead came from; defaults to the public contact form.
    source: Mapped[str] = mapped_column(String(50), nullable=False, default="contact_form")
    # Workflow status; defaults to "invite" — full value set not visible here,
    # confirm against the admin lead-management routes.
    status: Mapped[str] = mapped_column(String(32), nullable=False, default="invite", index=True)
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    # Attribute renamed because "metadata" is reserved on Declarative classes;
    # the DB column is still named "metadata".
    metadata_json: Mapped[Optional[dict]] = mapped_column("metadata", JSON, nullable=True)
    invited_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    invited_member_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="SET NULL"),
        nullable=True,
        index=True,
    )
+68
View File
@@ -0,0 +1,68 @@
from datetime import datetime
from decimal import Decimal
from sqlalchemy import Boolean, DateTime, ForeignKey, Index, Integer, JSON, Numeric, String, UniqueConstraint, func
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, TimestampMixin, UUIDMixin
class Experiment(Base, UUIDMixin, TimestampMixin):
    """An A/B experiment definition with its set of variants.

    experiment_key and cookie_name are both unique; eligible_routes is a JSON
    list of routes where the experiment applies.
    """
    __tablename__ = "experiments"
    experiment_key: Mapped[str] = mapped_column(String(64), nullable=False, unique=True, index=True)
    # Cookie used to persist a visitor's variant assignment — assumed; confirm
    # in the assignment middleware/service.
    cookie_name: Mapped[str] = mapped_column(String(96), nullable=False, unique=True)
    name: Mapped[str] = mapped_column(String(120), nullable=False)
    description: Mapped[str | None] = mapped_column(String(512), nullable=True)
    enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, index=True)
    eligible_routes: Mapped[list[str]] = mapped_column(JSON, nullable=False, default=list)
    # Variants are owned by the experiment and deleted with it.
    variants: Mapped[list["ExperimentVariant"]] = relationship(
        back_populates="experiment",
        cascade="all, delete-orphan",
        passive_deletes=True,
        order_by="ExperimentVariant.variant_key",
    )
class ExperimentVariant(Base, UUIDMixin, TimestampMixin):
    """A single arm of an experiment; (experiment_id, variant_key) is unique."""
    __tablename__ = "experiment_variants"
    __table_args__ = (
        UniqueConstraint("experiment_id", "variant_key", name="uq_experiment_variants_experiment_variant"),
    )
    # NOTE(review): annotated Mapped[str], but the FK target experiments.id is
    # a Uuid column and SQLAlchemy derives the column type from the FK when no
    # explicit type is given — runtime values are UUIDs. Consider
    # Mapped[uuid.UUID] for accuracy (requires importing uuid here).
    experiment_id: Mapped[str] = mapped_column(
        ForeignKey("experiments.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    variant_key: Mapped[str] = mapped_column(String(64), nullable=False)
    label: Mapped[str] = mapped_column(String(120), nullable=False)
    # Traffic weight for this arm — units (percentage vs weight) not visible
    # here; confirm in the assignment logic.
    allocation: Mapped[int] = mapped_column(Integer, nullable=False)
    is_control: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    experiment: Mapped[Experiment] = relationship(back_populates="variants")
class ExperimentEvent(Base, UUIDMixin):
    """Append-only record of an experiment exposure / conversion event.

    Keys are denormalised as strings (experiment_key, variant_key) rather than
    FKs, so events survive experiment edits and are queryable without joins.
    """
    __tablename__ = "experiment_events"
    __table_args__ = (
        Index("ix_experiment_events_experiment_variant_created_at", "experiment_key", "variant_key", "created_at"),
        Index("ix_experiment_events_session_created_at", "session_id", "created_at"),
    )
    experiment_key: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    variant_key: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    session_id: Mapped[str] = mapped_column(String(128), nullable=False, index=True)
    user_id: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True)
    path: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
    event_type: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    conversion_value: Mapped[Decimal | None] = mapped_column(Numeric(12, 2), nullable=True)
    # Attribute renamed because "metadata" is reserved on Declarative classes;
    # the DB column is still named "metadata".
    metadata_: Mapped[dict | None] = mapped_column("metadata", JSON, nullable=True)
    # FIX: dropped the client-side default=datetime.utcnow — it produced a
    # *naive* datetime for a timezone-aware column (and utcnow is deprecated
    # in Python 3.12). The database's func.now() server default is kept as the
    # single source of truth, matching AnalyticsEvent and the other models.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        nullable=False,
        index=True,
        server_default=func.now(),
    )
+192
View File
@@ -0,0 +1,192 @@
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import String, Boolean, DateTime, ForeignKey, Text, JSON, func, UniqueConstraint
from sqlalchemy import Uuid
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, UUIDMixin, TimestampMixin
class Member(Base, UUIDMixin, TimestampMixin):
    """A client account: credentials, lifecycle state, and owned records.

    hashed_password is nullable because a row can be created (invited) before
    the owner claims the account — see is_claimed / claimed_at. member_status
    defaults to "invited"; other values observed elsewhere in the app include
    "pending_contract" and "active" — confirm the full set in the admin routes.
    All child rows (walks, bookings, messages, tokens) are deleted with the
    member via delete-orphan cascades.
    """
    __tablename__ = "members"
    email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    # Null until the account is claimed and a password is set.
    hashed_password: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    first_name: Mapped[str] = mapped_column(String(100), nullable=False)
    last_name: Mapped[str] = mapped_column(String(100), nullable=False)
    phone: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    address: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    emergency_contact: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    is_claimed: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    notifications_enabled: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    member_status: Mapped[str] = mapped_column(String(32), default="invited", nullable=False, index=True)
    # Lifecycle milestones, stamped as each stage completes.
    claimed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    onboarding_completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    contract_signed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    contract_signer_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    contract_version: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    activated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    onboarding_data: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True)
    # Per-member price overrides — structure assumed to be keyed by service
    # type; confirm in the pricing service.
    service_pricing_overrides: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
    # Tri-state: None presumably means "follow the site-wide 2FA setting",
    # True/False a per-member override — confirm in the auth flow.
    force_two_factor: Mapped[Optional[bool]] = mapped_column(Boolean, nullable=True)
    verification_codes: Mapped[list["MemberVerificationCode"]] = relationship(
        "MemberVerificationCode", back_populates="member", cascade="all, delete-orphan"
    )
    refresh_tokens: Mapped[list["MemberRefreshToken"]] = relationship(
        "MemberRefreshToken", back_populates="member", cascade="all, delete-orphan"
    )
    walks: Mapped[list["Walk"]] = relationship(
        "Walk", back_populates="member", cascade="all, delete-orphan"
    )
    bookings: Mapped[list["Booking"]] = relationship(
        "Booking", back_populates="member", cascade="all, delete-orphan"
    )
    messages: Mapped[list["AdminMessage"]] = relationship(
        "AdminMessage", back_populates="member", cascade="all, delete-orphan"
    )
    notification_dispatches: Mapped[list["MemberNotificationDispatch"]] = relationship(
        "MemberNotificationDispatch", back_populates="member", cascade="all, delete-orphan"
    )
    magic_link_tokens: Mapped[list["MagicLinkToken"]] = relationship(
        "MagicLinkToken", back_populates="member", cascade="all, delete-orphan"
    )
class MemberVerificationCode(Base, UUIDMixin):
    """A short-lived, hashed one-time code for account claim or login 2FA.

    Only the hash is stored; used_at marks consumption, expires_at the cutoff.
    """
    __tablename__ = "member_verification_codes"
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    code_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    purpose: Mapped[str] = mapped_column(String(20), nullable=False) # "claim" | "login_2fa"
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    member: Mapped["Member"] = relationship("Member", back_populates="verification_codes")
class MemberRefreshToken(Base, UUIDMixin):
    """A hashed refresh token for a member session; revocable and expiring."""
    __tablename__ = "member_refresh_tokens"
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Only the hash is persisted, never the raw token.
    token_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    revoked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    member: Mapped["Member"] = relationship("Member", back_populates="refresh_tokens")
class Walk(Base, UUIDMixin, TimestampMixin):
    """A completed service visit recorded against a member."""
    __tablename__ = "walks"
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # When the walk actually took place (distinct from created_at).
    walked_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    service_type: Mapped[str] = mapped_column(String(50), nullable=False) # pack_walk | 1_1_walk | puppy_visit
    # Column type inferred from the int annotation (Integer).
    duration_minutes: Mapped[int] = mapped_column(nullable=False, default=60)
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    recorded_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    member: Mapped["Member"] = relationship("Member", back_populates="walks")
class Booking(Base, UUIDMixin, TimestampMixin):
    """A member's service request, moving through the status lifecycle below."""
    __tablename__ = "bookings"
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    service_type: Mapped[str] = mapped_column(String(50), nullable=False)
    requested_date: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending") # pending | confirmed | cancelled | completed
    # Member-supplied notes vs admin-only notes, kept separate.
    notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    admin_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    member: Mapped["Member"] = relationship("Member", back_populates="bookings")
class AdminMessage(Base, UUIDMixin, TimestampMixin):
    """A message between the admin and a member; direction marks the sender.

    reply_to_id self-references another admin_messages row, forming simple
    threads (SET NULL if the parent message is deleted).
    """
    __tablename__ = "admin_messages"
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    subject: Mapped[str] = mapped_column(String(255), nullable=False)
    body: Mapped[str] = mapped_column(Text, nullable=False)
    read_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    sent_by: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    # Soft-delete marker, presumably hiding rather than removing the row —
    # confirm in the message queries.
    deleted_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    # "inbound" = admin → member, "outbound" = member reply
    direction: Mapped[str] = mapped_column(String(16), nullable=False, default="inbound")
    reply_to_id: Mapped[Optional[uuid.UUID]] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("admin_messages.id", ondelete="SET NULL"),
        nullable=True,
    )
    member: Mapped["Member"] = relationship("Member", back_populates="messages")
class MagicLinkToken(Base, UUIDMixin):
    """A hashed magic-link token for a member; used_at marks consumption."""
    __tablename__ = "member_magic_link_tokens"
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Unique so a token hash resolves to exactly one row on lookup.
    token_hash: Mapped[str] = mapped_column(String(255), nullable=False, unique=True, index=True)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    used_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), nullable=True)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), nullable=False
    )
    member: Mapped["Member"] = relationship("Member", back_populates="magic_link_tokens")
class MemberNotificationDispatch(Base, UUIDMixin, TimestampMixin):
    """Idempotency record for automated notifications.

    The unique (member_id, dispatch_key) constraint guarantees a given
    notification is dispatched to a member at most once.
    """
    __tablename__ = "member_notification_dispatches"
    __table_args__ = (
        UniqueConstraint("member_id", "dispatch_key", name="uq_member_notification_dispatches_member_key"),
    )
    member_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("members.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    notification_type: Mapped[str] = mapped_column(String(64), nullable=False, index=True)
    dispatch_key: Mapped[str] = mapped_column(String(255), nullable=False)
    # Attribute renamed because "metadata" is reserved on Declarative classes;
    # the DB column is still named "metadata".
    metadata_json: Mapped[Optional[dict]] = mapped_column("metadata", JSON, nullable=True)
    member: Mapped["Member"] = relationship("Member", back_populates="notification_dispatches")
+19
View File
@@ -0,0 +1,19 @@
from sqlalchemy import String, Text, Boolean, Index
from sqlalchemy.orm import Mapped, mapped_column
from app.models.base import Base, UUIDMixin, TimestampMixin
class Page(Base, UUIDMixin, TimestampMixin):
    """A CMS page addressed by unique slug, with SEO metadata fields."""
    __tablename__ = "pages"
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    body: Mapped[str] = mapped_column(Text, nullable=False, default="")
    meta_title: Mapped[str | None] = mapped_column(String(255), nullable=True)
    meta_description: Mapped[str | None] = mapped_column(String(500), nullable=True)
    og_image_url: Mapped[str | None] = mapped_column(String(2048), nullable=True)
    published: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # NOTE(review): unique=True on slug already creates a unique index, so
    # ix_pages_slug looks redundant — confirm against migrations before
    # removing it.
    __table_args__ = (
        Index("ix_pages_slug", "slug"),
    )
+24
View File
@@ -0,0 +1,24 @@
from typing import List
from sqlalchemy import String, Text, Boolean, Index, JSON
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy import text
from app.models.base import Base, UUIDMixin, TimestampMixin
class BlogPost(Base, UUIDMixin, TimestampMixin):
    """A blog article addressed by unique slug, with a JSON list of tags."""
    __tablename__ = "blog_posts"
    title: Mapped[str] = mapped_column(String(255), nullable=False)
    slug: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    excerpt: Mapped[str | None] = mapped_column(Text, nullable=True)
    body: Mapped[str] = mapped_column(Text, nullable=False, default="")
    author: Mapped[str | None] = mapped_column(String(255), nullable=True)
    featured_image_url: Mapped[str | None] = mapped_column(String(2048), nullable=True)
    # Use JSON for broader DB compatibility; PostgreSQL ARRAY is handled via type override in migration
    tags: Mapped[list] = mapped_column(JSON, nullable=False, default=list)
    published: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # NOTE(review): unique=True on slug already creates a unique index, so
    # ix_blog_posts_slug looks redundant — confirm against migrations before
    # removing it.
    __table_args__ = (
        Index("ix_blog_posts_slug", "slug"),
    )
+20
View File
@@ -0,0 +1,20 @@
from datetime import datetime, timezone
from sqlalchemy import Text, DateTime, func
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy import JSON
from app.models.base import Base
class ContentSection(Base):
    """A keyed JSON blob of editable site content.

    Primary key is the section key itself; data is stored as JSONB on
    PostgreSQL and generic JSON on other backends (via with_variant).
    """
    __tablename__ = "content_sections"
    key: Mapped[str] = mapped_column(Text, primary_key=True)
    data: Mapped[dict] = mapped_column(JSON().with_variant(JSONB, "postgresql"), nullable=False)
    # DB-maintained; refreshed on ORM updates via onupdate.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
+27
View File
@@ -0,0 +1,27 @@
from datetime import datetime
from sqlalchemy import String, Text, JSON, Boolean, Integer, DateTime
from sqlalchemy.orm import Mapped, mapped_column
from app.models.base import Base, UUIDMixin, TimestampMixin
class SiteSettings(Base, UUIDMixin, TimestampMixin):
    """Site-wide configuration: branding, feature flags and notifications.

    Appears intended to hold a single row — confirm how the settings service
    loads it before relying on that assumption.
    """
    __tablename__ = "site_settings"
    site_name: Mapped[str] = mapped_column(String(255), nullable=False, default="")
    tagline: Mapped[str | None] = mapped_column(String(500), nullable=True)
    logo_url: Mapped[str | None] = mapped_column(String(2048), nullable=True)
    footer_text: Mapped[str | None] = mapped_column(Text, nullable=True)
    social_links: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
    # Notification toggles for the automated member-notification runs.
    automatic_member_notifications_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    nz_public_holiday_notifications_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    invoice_reminder_notifications_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    # Day-of-week number for invoice reminders; defaults to 1 — whether that
    # means Monday (ISO) or Tuesday (0-indexed) is not visible here; confirm
    # in the notification scheduler.
    invoice_day_of_week: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
    admin_notifications_cleared_before: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    # Feature flags gating member-portal functionality.
    bookings_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    walks_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    messages_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    two_factor_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    audit_history_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    experiments_enabled: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    service_pricing: Mapped[dict] = mapped_column(JSON, nullable=False, default=dict)
+39
View File
@@ -0,0 +1,39 @@
import uuid
from datetime import datetime
from sqlalchemy import String, Boolean, DateTime, ForeignKey, func
from sqlalchemy import Uuid
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.models.base import Base, UUIDMixin, TimestampMixin
class User(Base, UUIDMixin, TimestampMixin):
    """A staff/admin login account — a separate table from client Members."""
    __tablename__ = "users"
    email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    # Always set, unlike Member.hashed_password (no unclaimed state here).
    hashed_password: Mapped[str] = mapped_column(String(255), nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    refresh_tokens: Mapped[list["RefreshToken"]] = relationship(
        "RefreshToken", back_populates="user", cascade="all, delete-orphan"
    )
class RefreshToken(Base, UUIDMixin):
    """Opaque refresh-token record; only the token's hash is persisted."""
    __tablename__ = "refresh_tokens"
    # FK with DB-level cascade so tokens die with their user row.
    user_id: Mapped[uuid.UUID] = mapped_column(
        Uuid(as_uuid=True),
        ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Hash of the plaintext token handed to the client (never the plaintext).
    token_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    # Timezone-aware expiry; expired tokens are rejected on refresh.
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    # Set True on rotation/logout; revoked tokens can never be exchanged again.
    revoked: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    user: Mapped["User"] = relationship("User", back_populates="refresh_tokens")
View File
+202
View File
@@ -0,0 +1,202 @@
import hashlib
import secrets
import httpx
import user_agents
from fastapi import APIRouter, Depends, Request, Response
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.auth.deps import get_current_user
from app.middleware.rate_limit import limiter
from app.schemas.analytics import AnalyticsSummary, BookingOperationsSummary, EventCreate
from app.services.analytics import get_booking_operations_summary, get_summary, record_event
router = APIRouter(tags=["Analytics"])
ANON_COOKIE_NAME = "__gw_anon"
ANON_COOKIE_MAX_AGE = 60 * 60 * 24 * 365
CLIENT_METADATA_KEYS = {
"area",
"channel",
"destination",
"menu",
"plan",
"popular",
"price",
"unit",
"variant",
}
_PRIVATE_PREFIXES = ("127.", "10.", "172.16.", "172.17.", "172.18.", "172.19.",
"172.20.", "172.21.", "172.22.", "172.23.", "172.24.", "172.25.",
"172.26.", "172.27.", "172.28.", "172.29.", "172.30.", "172.31.",
"192.168.", "::1", "localhost")
def _mask_ip(ip: str) -> str:
"""Return a privacy-safe partial IP: last octet replaced with 'x'."""
if ":" in ip: # IPv6 — keep first 4 groups
parts = ip.split(":")
return ":".join(parts[:4]) + ":x"
parts = ip.split(".")
if len(parts) == 4:
return f"{parts[0]}.{parts[1]}.{parts[2]}.x"
return ip
def _get_client_ip(request: Request) -> str | None:
"""Resolve the best-effort client IP, preferring forwarded headers."""
forwarded = request.headers.get("x-forwarded-for")
if forwarded:
first = forwarded.split(",")[0].strip()
if first:
return first
real_ip = request.headers.get("x-real-ip")
if real_ip:
return real_ip.strip()
return request.client.host if request.client else None
def _should_secure_cookie(request: Request) -> bool:
"""Use Secure cookies in HTTPS contexts, but allow localhost HTTP development."""
return request.url.scheme == "https"
def _sanitize_client_metadata(metadata: dict | None) -> dict | None:
    """Keep only flat, non-identifying telemetry labels from the browser.

    Unknown keys and non-scalar values are dropped; strings are capped at
    120 chars. Returns None when nothing survives the filter.
    """
    if not metadata:
        return None
    sanitized: dict[str, str | int | float | bool] = {}
    for key, raw in metadata.items():
        if not (isinstance(key, str) and key in CLIENT_METADATA_KEYS):
            continue
        if isinstance(raw, str):
            sanitized[key] = raw[:120]
        elif isinstance(raw, (bool, int, float)):
            sanitized[key] = raw
    return sanitized or None
def _get_or_create_session_id(request: Request, response: Response, payload_session_id: str | None) -> str:
    """Return the anonymous session id, (re)issuing the cookie when needed.

    Preference order: existing cookie, legacy payload-provided id, then a
    freshly minted random token.
    """
    from_cookie = request.cookies.get(ANON_COOKIE_NAME)
    session_id = from_cookie or payload_session_id or secrets.token_urlsafe(24)
    if from_cookie != session_id:
        # The browser did not already present this id — set/refresh the cookie.
        response.set_cookie(
            key=ANON_COOKIE_NAME,
            value=session_id,
            max_age=ANON_COOKIE_MAX_AGE,
            httponly=True,
            samesite="lax",
            secure=_should_secure_cookie(request),
            path="/",
        )
    return session_id
def _parse_ua(ua_string: str) -> tuple[str | None, str | None]:
    """Parse a User-Agent string into (browser, os_name); both may be None."""
    if not ua_string:
        return None, None
    parsed = user_agents.parse(ua_string)
    browser = parsed.browser.family
    if browser and browser != "Other" and parsed.browser.version_string:
        # Keep only the major browser version.
        major = parsed.browser.version_string.split(".")[0]
        browser = f"{browser} {major}"
    os_name = parsed.os.family
    if os_name and os_name != "Other" and parsed.os.version_string:
        os_name = f"{os_name} {parsed.os.version_string}"
    if not browser or browser == "Other":
        browser = None
    else:
        browser = browser[:100]
    if not os_name or os_name == "Other":
        os_name = None
    else:
        os_name = os_name[:100]
    return browser, os_name
async def _geo_lookup(ip: str) -> tuple[str | None, str | None]:
    """Resolve IP to (country, city) via ip-api.com; (None, None) on any failure.

    Private/loopback addresses are skipped entirely.
    NOTE(review): the raw IP is sent to a third party over plain HTTP
    (ip-api free tier) — confirm this is acceptable for the privacy policy.
    """
    # str.startswith accepts a tuple of prefixes.
    if not ip or ip.startswith(_PRIVATE_PREFIXES):
        return None, None
    try:
        async with httpx.AsyncClient(timeout=2.0) as client:
            resp = await client.get(
                f"http://ip-api.com/json/{ip}",
                params={"fields": "status,country,city"},
            )
        if resp.status_code == 200:
            body = resp.json()
            if body.get("status") == "success":
                return body.get("country"), body.get("city")
    except Exception:
        # Best-effort lookup — degrade silently to no geo data.
        pass
    return None, None
@router.post("/api/web/event", status_code=201)
@router.post("/api/analytics/event", status_code=201)
@limiter.limit("60/minute")
async def ingest_event(
    request: Request,
    response: Response,
    data: EventCreate,
    db: AsyncSession = Depends(get_db),
):
    """Record a telemetry event. Public — no auth required.

    The raw IP is never persisted: only a truncated SHA-256 hash and a
    masked partial form are handed to the service layer. Registered on two
    paths so the legacy (/api/web/event) URL keeps working.
    """
    raw_ip = _get_client_ip(request)
    # Truncated hash gives a stable per-IP key without storing the address.
    ip_hash = hashlib.sha256(raw_ip.encode()).hexdigest()[:16] if raw_ip else None
    ip_partial = _mask_ip(raw_ip) if raw_ip else None
    ua_string = request.headers.get("User-Agent", "")
    browser, os_name = _parse_ua(ua_string)
    # Best-effort geo lookup; returns (None, None) on any failure.
    country, city = await _geo_lookup(raw_ip or "")
    session_id = _get_or_create_session_id(request, response, data.session_id)
    metadata = _sanitize_client_metadata(data.metadata) or {}
    referer = request.headers.get("referer")
    if referer:
        metadata["referrer"] = referer[:255]
    # Re-issue the payload with the server-owned session id and clean metadata.
    normalized = data.model_copy(update={
        "session_id": session_id,
        "metadata": metadata or None,
    })
    await record_event(
        db, normalized,
        ip_hash=ip_hash,
        ip_partial=ip_partial,
        user_agent=ua_string[:512] if ua_string else None,
        browser=browser,
        os_name=os_name,
        country=country,
        city=city,
    )
    return {"ok": True}
@router.get("/api/v1/analytics/summary", response_model=AnalyticsSummary)
async def analytics_summary(
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Aggregate analytics summary (admin-authenticated)."""
    summary = await get_summary(db)
    return summary
@router.get("/api/v1/analytics/bookings-summary", response_model=BookingOperationsSummary)
async def booking_operations_summary(
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Booking operations reporting (admin-authenticated)."""
    report = await get_booking_operations_summary(db)
    return report
+140
View File
@@ -0,0 +1,140 @@
"""
Audit router.
Admin:
GET /admin/audit — paginated, filtered audit log (admin-authenticated)
Member:
POST /members/audit/page-visit — record a page navigation (member-authenticated)
"""
import math
import uuid
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
from sqlalchemy import func, or_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.deps import get_current_user
from app.auth.member_deps import get_authenticated_member
from app.database import get_db
from app.middleware.rate_limit import limiter
from app.models.audit import AuditLog
from app.models.member import Member
from app.models.user import User
from app.schemas.audit import AuditLogPage, AuditLogResponse, PageVisitSchema
from app.services.audit import log_audit
from app.services.settings import get_feature_settings_snapshot
router = APIRouter(tags=["Audit"])
async def _require_audit_history_enabled(db: AsyncSession) -> None:
    """Raise 404 when the audit-history feature flag is off."""
    snapshot = await get_feature_settings_snapshot(db)
    if snapshot.audit_history_enabled:
        return
    raise HTTPException(status_code=404, detail="Audit history is currently disabled.")
# ── Admin: query audit log ─────────────────────────────────────────────────────
@router.get("/admin/audit", response_model=AuditLogPage)
async def admin_list_audit(
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
    member_id: Optional[uuid.UUID] = Query(None),
    action_type: Optional[str] = Query(None),
    status: Optional[str] = Query(None),
    area: Optional[str] = Query(None),
    date_from: Optional[datetime] = Query(None),
    date_to: Optional[datetime] = Query(None),
    search: Optional[str] = Query(None),
    sort_by: str = Query("timestamp"),
    sort_dir: str = Query("desc"),
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Paginated, filtered audit log (admin-authenticated).

    Filters are AND-ed; `search` is a case-insensitive substring match over
    several text columns. Unknown `sort_by` values silently fall back to
    `timestamp`. Returns a page plus total/total_pages bookkeeping.
    """
    await _require_audit_history_enabled(db)
    # Whitelist sortable columns so arbitrary attribute names can't be used.
    allowed_sort = {"timestamp", "action_type", "status", "area", "member_email"}
    if sort_by not in allowed_sort:
        sort_by = "timestamp"
    col = getattr(AuditLog, sort_by)
    order = col.desc() if sort_dir == "desc" else col.asc()
    conditions = []
    if member_id is not None:
        conditions.append(AuditLog.member_id == member_id)
    if action_type:
        conditions.append(AuditLog.action_type == action_type)
    if status:
        conditions.append(AuditLog.status == status)
    if area:
        conditions.append(AuditLog.area.ilike(f"%{area}%"))
    if date_from:
        conditions.append(AuditLog.timestamp >= date_from)
    if date_to:
        conditions.append(AuditLog.timestamp <= date_to)
    if search:
        term = f"%{search}%"
        conditions.append(
            or_(
                AuditLog.member_email.ilike(term),
                AuditLog.description.ilike(term),
                AuditLog.area.ilike(term),
                AuditLog.action_type.ilike(term),
                AuditLog.error_message.ilike(term),
            )
        )
    base_q = select(AuditLog)
    if conditions:
        # Multiple criteria passed to .where() are AND-ed by SQLAlchemy,
        # which avoids the previous function-level `from sqlalchemy import and_`.
        base_q = base_q.where(*conditions)
    # Count over the filtered subquery so total matches the filters exactly.
    count_result = await db.execute(select(func.count()).select_from(base_q.subquery()))
    total = count_result.scalar_one()
    offset = (page - 1) * page_size
    items_result = await db.execute(base_q.order_by(order).offset(offset).limit(page_size))
    items = items_result.scalars().all()
    return AuditLogPage(
        items=[AuditLogResponse.model_validate(i) for i in items],
        total=total,
        page=page,
        page_size=page_size,
        total_pages=max(1, math.ceil(total / page_size)),
    )
# ── Member: page visit ─────────────────────────────────────────────────────────
@router.post("/members/audit/page-visit", status_code=204)
@limiter.limit("120/minute")
async def member_log_page_visit(
    request: Request,
    response: Response,
    data: PageVisitSchema,
    member: Member = Depends(get_authenticated_member),
    db: AsyncSession = Depends(get_db),
):
    """Record a member page navigation in the audit log (always 204).

    Silently no-ops (still 204) when the audit-history feature is disabled,
    so clients never need to special-case the flag.
    """
    feature_settings = await get_feature_settings_snapshot(db)
    if not feature_settings.audit_history_enabled:
        return
    # Cap the stored path; title falls back to the path when absent.
    path = data.path[:255] if data.path else "unknown"
    title = data.title or path
    await log_audit(
        db,
        member_id=member.id,
        member_email=member.email,
        action_type="page_visit",
        area=path,
        description=f"Visited: {title}",
        status="success",
        ip_address=request.client.host if request.client else None,
        user_agent=request.headers.get("User-Agent"),
    )
+129
View File
@@ -0,0 +1,129 @@
from datetime import datetime, timezone
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.jwt import (
create_access_token,
create_refresh_token,
hash_refresh_token,
get_token_expiry,
)
from app.auth.password import verify_password
from app.database import get_db
from app.middleware.rate_limit import limiter
from app.models.user import User, RefreshToken
from app.schemas.auth import LoginRequest, TokenResponse, RefreshRequest
router = APIRouter(prefix="/auth", tags=["Auth"])
@router.post("/login", response_model=TokenResponse)
@limiter.limit("5/minute")
async def login(
    request: Request,
    response: Response,
    data: LoginRequest,
    db: AsyncSession = Depends(get_db),
):
    """
    Authenticate with email and password.
    Returns access token (15 min) and refresh token (7 days).

    Unknown email and wrong password both yield the same 401 message so the
    response body does not reveal which accounts exist.
    """
    result = await db.execute(select(User).where(User.email == data.email))
    user = result.scalars().first()
    # NOTE(review): verify_password is skipped for unknown emails, so response
    # timing can differ between the two 401 cases — confirm whether a
    # dummy-hash comparison is wanted for timing uniformity.
    if user is None or not verify_password(data.password, user.hashed_password):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid email or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Account is inactive",
        )
    access_token = create_access_token(data={"sub": str(user.id)})
    # Only the hash is stored; the plaintext goes to the client exactly once.
    plaintext_refresh, refresh_hash = create_refresh_token()
    refresh_token_row = RefreshToken(
        user_id=user.id,
        token_hash=refresh_hash,
        expires_at=get_token_expiry(),
        revoked=False,
        created_at=datetime.now(timezone.utc),
    )
    db.add(refresh_token_row)
    await db.flush()
    return TokenResponse(
        access_token=access_token,
        refresh_token=plaintext_refresh,
        token_type="bearer",
    )
@router.post("/refresh", response_model=TokenResponse)
@limiter.limit("5/minute")
async def refresh_tokens(
    request: Request,
    response: Response,
    data: RefreshRequest,
    db: AsyncSession = Depends(get_db),
):
    """
    Exchange a valid refresh token for a new token pair.
    The old refresh token is revoked atomically (token rotation).

    All failure modes (unknown, revoked, expired token; missing or inactive
    user) return the same 401 so callers cannot distinguish them.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid or expired refresh token",
        headers={"WWW-Authenticate": "Bearer"},
    )
    now = datetime.now(timezone.utc)
    # The client's plaintext token is matched by hash, never stored.
    token_hash = hash_refresh_token(data.refresh_token)
    result = await db.execute(
        select(RefreshToken).where(
            RefreshToken.token_hash == token_hash,
            # `.is_(False)` is the SQLAlchemy-recommended boolean test;
            # `== False` trips linters (E712) without being clearer.
            RefreshToken.revoked.is_(False),
            RefreshToken.expires_at > now,
        )
    )
    matched_row = result.scalars().first()
    if matched_row is None:
        raise credentials_exception
    # Revoke old token: a replayed token must fail from this point on.
    matched_row.revoked = True
    # Load user and make sure the account is still usable.
    result = await db.execute(select(User).where(User.id == matched_row.user_id))
    user = result.scalars().first()
    if user is None or not user.is_active:
        raise credentials_exception
    # Issue new tokens
    access_token = create_access_token(data={"sub": str(user.id)})
    plaintext_refresh, refresh_hash = create_refresh_token()
    new_refresh_row = RefreshToken(
        user_id=user.id,
        token_hash=refresh_hash,
        expires_at=get_token_expiry(),
        revoked=False,
        created_at=now,
    )
    db.add(new_refresh_row)
    await db.flush()
    return TokenResponse(
        access_token=access_token,
        refresh_token=plaintext_refresh,
        token_type="bearer",
    )
+167
View File
@@ -0,0 +1,167 @@
import hashlib
import secrets
import uuid
from datetime import datetime, timedelta, timezone
from fastapi import APIRouter, Depends, HTTPException, Request, Response
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.deps import get_current_user
from app.config import settings
from app.database import get_db
from app.middleware.rate_limit import limiter
from app.models.contact_lead import ContactLead
from app.models.member import Member, MagicLinkToken
from app.models.user import User
from app.schemas.contact import (
ContactLeadCreate,
ContactLeadInviteRequest,
ContactLeadInviteResponse,
ContactLeadResponse,
ContactLeadUpdate,
)
from app.services.email import send_onboarding_invite
router = APIRouter(tags=["Contact Leads"])
def _split_name(full_name: str) -> tuple[str, str]:
parts = [part for part in full_name.strip().split() if part]
if not parts:
return "Goodwalk", "Client"
if len(parts) == 1:
return parts[0], "Client"
return parts[0], " ".join(parts[1:])
def _normalise_services(payload: ContactLeadCreate) -> str | None:
if payload.services:
return ", ".join(payload.services)
if payload.service:
return payload.service.strip() or None
return None
@router.post("/api/contact", response_model=ContactLeadResponse, status_code=201)
@limiter.limit("10/minute")
async def submit_contact_lead(
    request: Request,
    response: Response,
    data: ContactLeadCreate,
    db: AsyncSession = Depends(get_db),
):
    """Store a public website contact submission as a ContactLead (201).

    Free-text fields are trimmed and empty strings collapsed to NULL; the
    raw services payload is kept verbatim in metadata_json for auditing.
    """
    lead = ContactLead(
        full_name=data.name.strip(),
        email=data.email.strip().lower(),  # normalised for later member matching
        phone=(data.phone or "").strip() or None,
        requested_services=_normalise_services(data),
        pet_name=(data.petName or "").strip() or None,
        pet_breed=(data.petBreed or "").strip() or None,
        suburb=(data.location or "").strip() or None,
        service_area_status=(data.serviceAreaStatus or "").strip() or None,
        message=(data.message or "").strip() or None,
        source=data.source,
        status="invite",
        metadata_json={
            "services": data.services,
            "service": data.service,
        },
    )
    db.add(lead)
    await db.flush()
    await db.refresh(lead)
    return lead
@router.get("/api/v1/admin/leads", response_model=list[ContactLeadResponse])
async def admin_list_leads(
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """All contact leads, newest first (admin-authenticated)."""
    query = select(ContactLead).order_by(ContactLead.created_at.desc())
    result = await db.execute(query)
    return result.scalars().all()
@router.put("/api/v1/admin/leads/{lead_id}", response_model=ContactLeadResponse)
async def admin_update_lead(
    lead_id: uuid.UUID,
    data: ContactLeadUpdate,
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Partially update a lead; only fields present in the payload change."""
    found = await db.execute(select(ContactLead).where(ContactLead.id == lead_id))
    lead = found.scalars().first()
    if lead is None:
        raise HTTPException(status_code=404, detail="Lead not found.")
    # exclude_unset keeps this a true PATCH-style partial update.
    changes = data.model_dump(exclude_unset=True)
    for field_name, new_value in changes.items():
        setattr(lead, field_name, new_value)
    await db.flush()
    await db.refresh(lead)
    return lead
@router.post("/api/v1/admin/leads/{lead_id}/invite", response_model=ContactLeadInviteResponse)
async def admin_invite_lead(
    lead_id: uuid.UUID,
    data: ContactLeadInviteRequest,
    _admin: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db),
):
    """Promote a contact lead to an invited member, optionally emailing a magic link.

    Reuses an existing member with the lead's email rather than duplicating.
    When ``data.send_email`` is true, a 7-day magic-link token is minted and
    only its SHA-256 hash is persisted.
    """
    result = await db.execute(select(ContactLead).where(ContactLead.id == lead_id))
    lead = result.scalars().first()
    if lead is None:
        raise HTTPException(status_code=404, detail="Lead not found.")
    # Look for an existing member with this email before creating one.
    existing_member_result = await db.execute(select(Member).where(Member.email == lead.email))
    member = existing_member_result.scalars().first()
    if member is None:
        first_name, last_name = _split_name(lead.full_name)
        member = Member(
            email=lead.email,
            first_name=first_name,
            last_name=last_name,
            phone=lead.phone,
            address=lead.suburb,
            # Carry the lead's answers into onboarding so they pre-fill later.
            onboarding_data={
                "dog_name": lead.pet_name,
                "dog_breed": lead.pet_breed,
                "preferred_service": lead.requested_services,
                "lead_message": lead.message,
                "service_area_status": lead.service_area_status,
                "source": lead.source,
            },
            is_claimed=False,
            is_active=True,
            member_status="invited",
        )
        db.add(member)
        await db.flush()
    # Link the lead to the member and mark it invited.
    lead.invited_member_id = member.id
    lead.invited_at = datetime.now(timezone.utc)
    lead.status = "invited"
    await db.flush()
    await db.refresh(lead)
    await db.refresh(member)
    if data.send_email:
        # Store only the token hash; the plaintext goes out in the email link.
        plaintext_token = secrets.token_urlsafe(32)
        token_hash = hashlib.sha256(plaintext_token.encode()).hexdigest()
        magic_token = MagicLinkToken(
            member_id=member.id,
            token_hash=token_hash,
            expires_at=datetime.now(timezone.utc) + timedelta(days=7),
        )
        db.add(magic_token)
        await db.flush()
        magic_url = f"{settings.MEMBERS_URL.rstrip('/')}/join?token={plaintext_token}"
        await send_onboarding_invite(lead.email, member.first_name, magic_url)
    return ContactLeadInviteResponse(
        lead=ContactLeadResponse.model_validate(lead),
        member_id=member.id,
        member_status=member.member_status,
    )
+187
View File
@@ -0,0 +1,187 @@
import re
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.deps import get_current_user
from app.database import get_db
from app.middleware.rate_limit import limiter
from app.schemas.experiments import (
ExperimentConversionCreate,
ExperimentDefinitionResponse,
ExperimentDefinitionUpdate,
ExperimentEventCreate,
ExperimentImpressionCreate,
ExperimentIngestResponse,
ExperimentResult,
)
from app.services.experiments import (
experiment_exists,
get_experiment_definition,
get_experiment_results,
list_experiment_definitions,
record_experiment_event,
upsert_experiment_definition,
)
from app.services.settings import get_feature_settings_snapshot
router = APIRouter(tags=["Experiments"])
BOT_UA_PATTERN = re.compile(r"(bot|crawler|spider|slurp|preview|headless)", re.IGNORECASE)
def _is_bot_request(request: Request) -> bool:
    """Heuristically detect crawlers/headless browsers from the User-Agent."""
    ua = request.headers.get("user-agent", "")
    return BOT_UA_PATTERN.search(ua) is not None
def _validate_experiment_assignment(experiment_key: str, variant_key: str) -> None:
    """Reject payloads referencing an unknown experiment/variant with 422."""
    if experiment_exists(experiment_key, variant_key):
        return
    raise HTTPException(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        detail="unknown experiment or variant",
    )
async def _experiments_enabled(db: AsyncSession) -> bool:
    """Read the experiments feature flag from the settings snapshot."""
    snapshot = await get_feature_settings_snapshot(db)
    return snapshot.experiments_enabled
async def _require_experiments_enabled(db: AsyncSession) -> None:
    """404 when experiments are disabled, hiding the endpoints entirely."""
    enabled = await _experiments_enabled(db)
    if not enabled:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Experiments are currently disabled.")
@router.get("/api/experiments", response_model=list[ExperimentDefinitionResponse])
async def get_experiments(db: AsyncSession = Depends(get_db)):
    """Public list of experiment definitions; empty when the feature is off."""
    if await _experiments_enabled(db):
        return await list_experiment_definitions(db)
    return []
@router.post("/api/experiments/impression", response_model=ExperimentIngestResponse, status_code=202)
@limiter.limit("30/minute")
async def ingest_experiment_impression(
    request: Request,
    response: Response,
    payload: ExperimentImpressionCreate,
    db: AsyncSession = Depends(get_db),
):
    """Record a variant impression; accepted=False when disabled or from a bot."""
    enabled = await _experiments_enabled(db)
    if enabled:
        # Unknown experiment/variant combos are rejected (422) before bot checks.
        _validate_experiment_assignment(payload.experiment_key, payload.variant_key)
    if not enabled or _is_bot_request(request):
        return ExperimentIngestResponse(ok=True, accepted=False)
    await record_experiment_event(db, payload)
    return ExperimentIngestResponse(ok=True, accepted=True)
@router.post("/api/experiments/event", response_model=ExperimentIngestResponse, status_code=202)
@limiter.limit("30/minute")
async def ingest_experiment_event(
    request: Request,
    response: Response,
    payload: ExperimentEventCreate,
    db: AsyncSession = Depends(get_db),
):
    """Record an experiment event; accepted=False when disabled or from a bot."""
    enabled = await _experiments_enabled(db)
    if enabled:
        # Validation happens first so invalid payloads still return 422.
        _validate_experiment_assignment(payload.experiment_key, payload.variant_key)
    if not enabled or _is_bot_request(request):
        return ExperimentIngestResponse(ok=True, accepted=False)
    await record_experiment_event(db, payload)
    return ExperimentIngestResponse(ok=True, accepted=True)
@router.post("/api/experiments/conversion", response_model=ExperimentIngestResponse, status_code=202)
@limiter.limit("30/minute")
async def ingest_experiment_conversion(
    request: Request,
    response: Response,
    payload: ExperimentConversionCreate,
    db: AsyncSession = Depends(get_db),
):
    """Record a conversion; accepted=False when disabled or from a bot."""
    enabled = await _experiments_enabled(db)
    if enabled:
        # Validation happens first so invalid payloads still return 422.
        _validate_experiment_assignment(payload.experiment_key, payload.variant_key)
    if not enabled or _is_bot_request(request):
        return ExperimentIngestResponse(ok=True, accepted=False)
    await record_experiment_event(db, payload)
    return ExperimentIngestResponse(ok=True, accepted=True)
@router.get("/api/v1/experiments/results", response_model=list[ExperimentResult])
async def experiment_results(
    experiment_key: str | None = None,
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Experiment results for admins; optionally filtered to one experiment."""
    await _require_experiments_enabled(db)
    results = await get_experiment_results(db, experiment_key)
    return results
@router.get("/api/admin/experiments", response_model=list[ExperimentDefinitionResponse])
async def admin_list_experiments(
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """All experiment definitions (admin-authenticated)."""
    await _require_experiments_enabled(db)
    definitions = await list_experiment_definitions(db)
    return definitions
@router.get("/api/admin/experiments/{experiment_key}", response_model=ExperimentDefinitionResponse)
async def admin_get_experiment(
    experiment_key: str,
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Fetch one experiment definition (admin); 404 when unknown."""
    await _require_experiments_enabled(db)
    experiment = await get_experiment_definition(db, experiment_key)
    if experiment is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Experiment not found")
    definitions = await list_experiment_definitions(db)
    match = next((item for item in definitions if item.experiment_key == experiment_key), None)
    if match is None:
        # The list view disagreed with the direct read (e.g. concurrent delete).
        # Previously an `assert`, which is stripped under `python -O` — raise
        # an explicit 404 instead of risking an unchecked None return.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Experiment not found")
    return match
@router.put("/api/admin/experiments/{experiment_key}", response_model=ExperimentDefinitionResponse)
async def admin_update_experiment(
    experiment_key: str,
    payload: ExperimentDefinitionUpdate,
    db: AsyncSession = Depends(get_db),
    _=Depends(get_current_user),
):
    """Create or update an experiment definition (admin); 400 on invalid data."""
    await _require_experiments_enabled(db)
    try:
        experiment = await upsert_experiment_definition(db, experiment_key, payload)
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    # Serialise the ORM variants into plain dicts for the response model.
    variant_payloads = [
        {
            "variant_key": v.variant_key,
            "label": v.label,
            "allocation": v.allocation,
            "is_control": v.is_control,
        }
        for v in experiment.variants
    ]
    return ExperimentDefinitionResponse(
        experiment_key=experiment.experiment_key,
        cookie_name=experiment.cookie_name,
        name=experiment.name,
        description=experiment.description,
        enabled=experiment.enabled,
        eligible_routes=experiment.eligible_routes,
        variants=variant_payloads,
    )
File diff suppressed because it is too large Load Diff
+64
View File
@@ -0,0 +1,64 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.deps import get_current_user
from app.database import get_db
from app.models.user import User
from app.schemas.page import PageCreate, PageUpdate, PageResponse
from app.services import pages as page_service
from typing import List
router = APIRouter(prefix="/pages", tags=["Pages"])
@router.get("", response_model=List[PageResponse])
async def list_pages(db: AsyncSession = Depends(get_db)):
    """List all published pages."""
    published = await page_service.get_published_pages(db)
    return [PageResponse.model_validate(item) for item in published]
@router.get("/{slug}", response_model=PageResponse)
async def get_page(slug: str, db: AsyncSession = Depends(get_db)):
    """Fetch one published page by slug; 404 when missing or unpublished."""
    page = await page_service.get_page_by_slug(db, slug, published_only=True)
    if page is not None:
        return PageResponse.model_validate(page)
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Page '{slug}' not found")
@router.post("", response_model=PageResponse, status_code=status.HTTP_201_CREATED)
async def create_page(
    data: PageCreate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Create a new page (auth required); 201 with the created page."""
    created = await page_service.create_page(db, data)
    return PageResponse.model_validate(created)
@router.put("/{slug}", response_model=PageResponse)
async def update_page(
    slug: str,
    data: PageUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Update a page by slug (auth required); 404 when no such page."""
    updated = await page_service.update_page(db, slug, data)
    if updated is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Page '{slug}' not found")
    return PageResponse.model_validate(updated)
@router.delete("/{slug}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_page(
    slug: str,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Delete a page by slug (auth required); 404 when no such page."""
    removed = await page_service.delete_page(db, slug)
    if removed:
        return
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Page '{slug}' not found")
+66
View File
@@ -0,0 +1,66 @@
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.deps import get_current_user
from app.database import get_db
from app.models.user import User
from app.schemas.post import PostCreate, PostUpdate, PostResponse, PaginatedPostsResponse
from app.services import posts as post_service
router = APIRouter(prefix="/posts", tags=["Posts"])
@router.get("", response_model=PaginatedPostsResponse)
async def list_posts(
    page: int = Query(default=1, ge=1),
    per_page: int = Query(default=10, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
):
    """Paginated list of published posts."""
    paginated = await post_service.get_published_posts(db, page=page, per_page=per_page)
    return paginated
@router.get("/{slug}", response_model=PostResponse)
async def get_post(slug: str, db: AsyncSession = Depends(get_db)):
    """Fetch one published post by slug; 404 when missing or unpublished."""
    post = await post_service.get_post_by_slug(db, slug, published_only=True)
    if post is not None:
        return PostResponse.model_validate(post)
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Post '{slug}' not found")
@router.post("", response_model=PostResponse, status_code=status.HTTP_201_CREATED)
async def create_post(
    data: PostCreate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Create a new blog post (auth required); 201 with the created post."""
    created = await post_service.create_post(db, data)
    return PostResponse.model_validate(created)
@router.put("/{slug}", response_model=PostResponse)
async def update_post(
    slug: str,
    data: PostUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Update a post by slug (auth required); 404 when no such post."""
    updated = await post_service.update_post(db, slug, data)
    if updated is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Post '{slug}' not found")
    return PostResponse.model_validate(updated)
@router.delete("/{slug}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_post(
    slug: str,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Delete a post by slug (auth required); 404 when no such post."""
    removed = await post_service.delete_post(db, slug)
    if removed:
        return
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Post '{slug}' not found")
+101
View File
@@ -0,0 +1,101 @@
"""
Legacy-compatible content section endpoints.
Matches the URL shapes the SvelteKit frontend already calls,
so no frontend changes are needed.
"""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.auth.deps import get_current_user
from app.models.user import User
from app.services.sections import get_section, upsert_section, list_sections
router = APIRouter(tags=["Sections"])
# Slug → content_sections key
PAGE_SLUG_MAP = {
"home": "pages.home",
"pack-walks": "pages.packWalks",
"1-1-walks": "pages.oneOnOneWalks",
"puppy-visits": "pages.puppyVisits",
"pricing": "pages.pricing",
"about": "pages.about",
"contact": "pages.contact",
}
# ── Public read endpoints ────────────────────────────────────────────────────
@router.get("/api/site-settings")
async def site_settings(db: AsyncSession = Depends(get_db)):
    """Public site settings; empty object when the section is unset."""
    section = await get_section(db, "siteSettings")
    return section or {}
@router.get("/api/navigation")
async def navigation(db: AsyncSession = Depends(get_db)):
    """Public navigation tree; empty item list when the section is unset."""
    section = await get_section(db, "navigation")
    return section or {"items": []}
@router.get("/api/footer")
async def footer(db: AsyncSession = Depends(get_db)):
    """Public footer content; empty object when the section is unset."""
    section = await get_section(db, "footer")
    return section or {}
@router.get("/api/testimonials")
async def testimonials(db: AsyncSession = Depends(get_db)):
    """Public testimonials; empty list only when the section is missing."""
    section = await get_section(db, "testimonials")
    return [] if section is None else section
@router.get("/api/onboarding")
async def onboarding(db: AsyncSession = Depends(get_db)):
    """Public onboarding content; empty object when the section is unset."""
    section = await get_section(db, "onboarding")
    return section or {}
@router.get("/api/pages/{slug}")
async def page_by_slug(slug: str, db: AsyncSession = Depends(get_db)):
    """Resolve a legacy page slug to its content section; 404 otherwise."""
    not_found = HTTPException(status_code=404, detail=f"Page '{slug}' not found")
    section_key = PAGE_SLUG_MAP.get(slug)
    if section_key is None:
        raise not_found
    content = await get_section(db, section_key)
    if content is None:
        raise not_found
    return content
# ── Protected admin endpoints ────────────────────────────────────────────────
@router.get("/api/admin/sections")
async def admin_list_sections(
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    """All content sections (admin-authenticated)."""
    sections = await list_sections(db)
    return sections
@router.get("/api/admin/sections/{key:path}")
async def admin_get_section(
    key: str,
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    """One content section by key (admin); 404 when absent."""
    content = await get_section(db, key)
    if content is None:
        raise HTTPException(status_code=404, detail="Section not found")
    return {"key": key, "data": content}
@router.put("/api/admin/sections/{key:path}")
async def admin_update_section(
    key: str,
    body: dict,
    db: AsyncSession = Depends(get_db),
    _: User = Depends(get_current_user),
):
    """Create or replace a content section (admin-authenticated)."""
    saved = await upsert_section(db, key, body)
    return {"success": True, "key": saved.key}
+202
View File
@@ -0,0 +1,202 @@
from datetime import UTC, datetime, timedelta
import httpx
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy.ext.asyncio import AsyncSession
from app.auth.deps import get_current_user
from app.database import get_db
from app.models.user import User
from app.schemas.settings import (
FeatureSettingsResponse,
FeatureSettingsUpdate,
PlannerWeatherResponse,
ServicePricingSettingsResponse,
ServicePricingSettingsUpdate,
SiteSettingsResponse,
SiteSettingsUpdate,
)
from app.services import settings as settings_service
from app.services.settings import FeatureSettingsSchemaOutdatedError, ServicePricingSchemaOutdatedError
# Router for site/feature/pricing settings plus the planner weather proxy.
router = APIRouter(prefix="/settings", tags=["Settings"])

# Open-Meteo forecast API used as the upstream weather source.
PLANNER_WEATHER_URL = "https://api.open-meteo.com/v1/forecast"
# Re-fetch upstream data at most once per hour.
PLANNER_WEATHER_TTL = timedelta(hours=1)
# Process-wide cache. "fetched_at" starts at the epoch-minimum so the first
# request always triggers a fetch; "weather" maps ISO date -> day summary.
# NOTE(review): shared mutable module state — one cache per worker process.
PLANNER_WEATHER_CACHE = {
    "fetched_at": datetime.min.replace(tzinfo=UTC),
    "weather": {},
}
async def _load_planner_weather_snapshot() -> tuple[datetime, dict[str, dict[str, int]]]:
    """Return (fetched_at, weather) from cache, refreshing from Open-Meteo when stale.

    On upstream HTTP errors the stale cached snapshot is served if one
    exists; otherwise the httpx error propagates to the caller.
    """
    fetched_at = PLANNER_WEATHER_CACHE["fetched_at"]
    cached_weather = PLANNER_WEATHER_CACHE["weather"]
    now = datetime.now(UTC)
    # Serve from cache while it is non-empty and younger than the TTL.
    if cached_weather and now - fetched_at < PLANNER_WEATHER_TTL:
        return fetched_at, cached_weather
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(
                PLANNER_WEATHER_URL,
                params={
                    # Auckland, NZ; 16 days of forecast plus 14 days of history.
                    "latitude": -36.85,
                    "longitude": 174.77,
                    "daily": "weathercode,temperature_2m_max,temperature_2m_min",
                    "timezone": "Pacific/Auckland",
                    "forecast_days": 16,
                    "past_days": 14,
                },
            )
            response.raise_for_status()
            payload = response.json()
    except httpx.HTTPError:
        # Prefer stale data over a hard failure when the upstream is down.
        if cached_weather:
            return fetched_at, cached_weather
        raise
    next_weather: dict[str, dict[str, int]] = {}
    daily = payload.get("daily") or {}
    dates = daily.get("time") or []
    codes = daily.get("weathercode") or []
    highs = daily.get("temperature_2m_max") or []
    lows = daily.get("temperature_2m_min") or []
    for index, date_key in enumerate(dates):
        # Skip dates the upstream returned without a full code/high/low triple.
        if index >= len(codes) or index >= len(highs) or index >= len(lows):
            continue
        # NOTE(review): assumes the per-day values are numeric, never null —
        # int()/round() would raise on a null entry; confirm against the API.
        next_weather[date_key] = {
            "code": int(codes[index]),
            "max": round(highs[index]),
            "min": round(lows[index]),
        }
    fetched_at = now
    PLANNER_WEATHER_CACHE["fetched_at"] = fetched_at
    PLANNER_WEATHER_CACHE["weather"] = next_weather
    return fetched_at, next_weather
def _filter_planner_weather(
    weather: dict[str, dict[str, int]],
    start_date: str | None,
    end_date: str | None,
) -> dict[str, dict[str, int]]:
    """Restrict *weather* to ISO-date keys within [start_date, end_date].

    Either bound may be None/empty (unbounded on that side). With no bounds
    at all, the original mapping is returned unchanged (not copied).
    """
    if not (start_date or end_date):
        return weather
    # ISO dates compare correctly as plain strings, so no parsing is needed.
    return {
        day: summary
        for day, summary in weather.items()
        if (not start_date or day >= start_date)
        and (not end_date or day <= end_date)
    }
@router.get("", response_model=SiteSettingsResponse)
async def get_settings(db: AsyncSession = Depends(get_db)):
    """Return the site settings singleton; 404 until it has been seeded."""
    record = await settings_service.get_settings(db)
    if record is not None:
        return SiteSettingsResponse.model_validate(record)
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="Site settings have not been configured yet. Run seed.py to initialise.",
    )
@router.put("", response_model=SiteSettingsResponse)
async def update_settings(
    data: SiteSettingsUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Create or update the site settings singleton (auth required)."""
    # The service layer upserts, so this works before initial seeding too.
    saved = await settings_service.upsert_settings(db, data)
    return SiteSettingsResponse.model_validate(saved)
@router.get("/features", response_model=FeatureSettingsResponse)
async def get_feature_settings(db: AsyncSession = Depends(get_db)):
    """Return the current feature-flag snapshot (public, no auth)."""
    snapshot = await settings_service.get_feature_settings_snapshot(db)
    flags = {
        name: getattr(snapshot, name)
        for name in (
            "bookings_enabled",
            "walks_enabled",
            "messages_enabled",
            "two_factor_enabled",
            "audit_history_enabled",
            "experiments_enabled",
        )
    }
    return FeatureSettingsResponse(**flags)
@router.put("/features", response_model=FeatureSettingsResponse)
async def update_feature_settings(
    data: FeatureSettingsUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Update feature flags (auth required).

    Raises:
        HTTPException 409: when the persisted settings schema is outdated
            (FeatureSettingsSchemaOutdatedError from the service layer).
    """
    del current_user  # dependency is an auth gate only; the user is unused
    try:
        snapshot = await settings_service.update_feature_settings_snapshot(db, data)
    except FeatureSettingsSchemaOutdatedError as exc:
        # Chain the cause (B904) so the underlying schema error survives in
        # tracebacks — matches the `from exc` style used by get_planner_weather.
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
    return FeatureSettingsResponse(
        bookings_enabled=snapshot.bookings_enabled,
        walks_enabled=snapshot.walks_enabled,
        messages_enabled=snapshot.messages_enabled,
        two_factor_enabled=snapshot.two_factor_enabled,
        audit_history_enabled=snapshot.audit_history_enabled,
        experiments_enabled=snapshot.experiments_enabled,
    )
@router.get("/pricing", response_model=ServicePricingSettingsResponse)
async def get_service_pricing(db: AsyncSession = Depends(get_db)):
    """Return the current service pricing snapshot (public, no auth)."""
    pricing = await settings_service.get_service_pricing_snapshot(db)
    return ServicePricingSettingsResponse(service_pricing=pricing)
@router.put("/pricing", response_model=ServicePricingSettingsResponse)
async def update_service_pricing(
    data: ServicePricingSettingsUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Replace the service pricing snapshot (auth required).

    Raises:
        HTTPException 409: when the persisted pricing schema is outdated
            (ServicePricingSchemaOutdatedError from the service layer).
    """
    del current_user  # dependency is an auth gate only; the user is unused
    try:
        snapshot = await settings_service.update_service_pricing_snapshot(
            db,
            service_pricing=data.service_pricing,
        )
    except ServicePricingSchemaOutdatedError as exc:
        # Chain the cause (B904) so the underlying schema error survives in
        # tracebacks — matches the `from exc` style used by get_planner_weather.
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(exc)) from exc
    return ServicePricingSettingsResponse(service_pricing=snapshot)
@router.get("/planner-weather", response_model=PlannerWeatherResponse)
async def get_planner_weather(
    start_date: str | None = Query(default=None),
    end_date: str | None = Query(default=None),
    current_user: User = Depends(get_current_user),
):
    """Return the cached weather snapshot, optionally filtered by date range."""
    del current_user  # dependency is an auth gate only; the user is unused
    # Reject malformed bounds before touching the cache / upstream.
    for label, value in (("start_date", start_date), ("end_date", end_date)):
        if not value:
            continue
        try:
            datetime.strptime(value, "%Y-%m-%d")
        except ValueError as exc:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"{label} must use YYYY-MM-DD format.",
            ) from exc
    fetched_at, weather = await _load_planner_weather_snapshot()
    filtered = _filter_planner_weather(weather, start_date, end_date)
    return PlannerWeatherResponse(fetched_at=fetched_at, weather=filtered)
View File
+85
View File
@@ -0,0 +1,85 @@
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any, List
from datetime import datetime
import uuid
class EventCreate(BaseModel):
    """Payload accepted from the client-side tracker when recording an event."""

    event_type: str = Field(..., max_length=64)
    page: str = Field(..., max_length=255)
    element: Optional[str] = Field(None, max_length=255)
    metadata: Optional[Dict[str, Any]] = None
    session_id: Optional[str] = Field(None, max_length=64)


class EventResponse(BaseModel):
    """A stored analytics event as returned to the admin UI.

    ip_partial/browser/os_name/country/city are not part of EventCreate —
    they are populated server-side when the event is recorded.
    """

    id: uuid.UUID
    event_type: str
    page: str
    element: Optional[str]
    session_id: str
    ip_partial: Optional[str]
    browser: Optional[str]
    os_name: Optional[str]
    country: Optional[str]
    city: Optional[str]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class DailyStat(BaseModel):
    """Event count for a single ISO date."""

    date: str
    count: int


class TopItem(BaseModel):
    """A labelled count used for top-N breakdowns."""

    label: str
    count: int


class AnalyticsSummary(BaseModel):
    """Aggregate payload for the admin analytics dashboard."""

    total_events_today: int
    total_events_yesterday: int
    page_views_today: int
    unique_sessions_today: int
    unique_sessions_total: int
    total_events_all_time: int
    events_by_type: List[TopItem]
    top_pages: List[TopItem]
    top_elements: List[TopItem]
    top_journeys: List[TopItem]
    top_browsers: List[TopItem]
    top_os: List[TopItem]
    top_countries: List[TopItem]
    events_last_7_days: List[DailyStat]
    recent_events: List[EventResponse]
class BookingActivityStat(BaseModel):
    """Bookings created vs. cancelled on a single ISO date."""

    date: str
    booked: int
    cancellations: int


class BookingForwardLoadStat(BaseModel):
    """Upcoming booking load for one ISO date, split into AM/PM slots."""

    date: str
    total: int
    am: int
    pm: int


class BookingCustomerVolume(BaseModel):
    """A customer label with their booking count (high-volume ranking)."""

    label: str
    count: int


class BookingOperationsSummary(BaseModel):
    """Aggregate payload for the admin booking-operations reporting page."""

    active_bookings_total: int
    forward_load_total: int
    booked_last_30_days: int
    cancellations_last_30_days: int
    high_volume_customer_count: int
    forward_load_next_14_days: List[BookingForwardLoadStat]
    activity_last_30_days: List[BookingActivityStat]
    top_high_volume_customers: List[BookingCustomerVolume]
+37
View File
@@ -0,0 +1,37 @@
import uuid
from datetime import datetime
from typing import Optional
from pydantic import BaseModel
class AuditLogResponse(BaseModel):
    """A single audit log entry as exposed to the admin UI."""

    id: uuid.UUID
    timestamp: datetime
    # Actor fields are optional: entries can exist without a member context.
    member_id: Optional[uuid.UUID]
    member_email: Optional[str]
    action_type: str
    area: str
    description: str
    status: str
    booking_id: Optional[uuid.UUID]
    error_message: Optional[str]
    error_detail: Optional[str]
    ip_address: Optional[str]
    user_agent: Optional[str]
    extra: Optional[dict]

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class AuditLogPage(BaseModel):
    """One page of audit log results plus pagination totals."""

    items: list[AuditLogResponse]
    total: int
    page: int
    page_size: int
    total_pages: int


class PageVisitSchema(BaseModel):
    """Client-reported page visit: path plus an optional page title."""

    path: str
    title: Optional[str] = None
+28
View File
@@ -0,0 +1,28 @@
import uuid
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, EmailStr, ConfigDict
class LoginRequest(BaseModel):
    """Email/password credentials for login."""

    # NOTE(review): EmailStr is imported in this module but unused; switching
    # `email` to EmailStr would tighten validation — confirm clients first.
    email: str
    password: str


class TokenResponse(BaseModel):
    """Bearer token pair issued on successful login."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"


class RefreshRequest(BaseModel):
    """Refresh-token exchange request."""

    refresh_token: str


class UserResponse(BaseModel):
    """Public representation of an authenticated user account."""

    id: uuid.UUID
    email: str
    is_active: bool
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = ConfigDict(from_attributes=True)
+57
View File
@@ -0,0 +1,57 @@
import uuid
from datetime import datetime
from typing import Any, Optional
from pydantic import BaseModel, Field
class ContactLeadCreate(BaseModel):
    """Inbound contact-form submission.

    camelCase field names (petName, serviceAreaStatus, …) match the JSON the
    public form sends; they map onto snake_case columns in the response model.
    """

    name: str = Field(min_length=1, max_length=255)
    email: str
    phone: Optional[str] = Field(default=None, max_length=50)
    service: Optional[str] = Field(default=None, max_length=255)
    services: list[str] = Field(default_factory=list)
    petName: Optional[str] = Field(default=None, max_length=100)
    petBreed: Optional[str] = Field(default=None, max_length=100)
    location: Optional[str] = Field(default=None, max_length=100)
    serviceAreaStatus: Optional[str] = Field(default=None, max_length=32)
    message: Optional[str] = Field(default=None, max_length=5000)
    source: str = Field(default="contact_form", max_length=50)


class ContactLeadResponse(BaseModel):
    """A stored contact lead, including admin workflow state."""

    id: uuid.UUID
    full_name: str
    email: str
    phone: Optional[str]
    requested_services: Optional[str]
    pet_name: Optional[str]
    pet_breed: Optional[str]
    suburb: Optional[str]
    service_area_status: Optional[str]
    message: Optional[str]
    source: str
    status: str
    notes: Optional[str]
    # Set once the lead has been invited to become a member.
    invited_at: Optional[datetime]
    invited_member_id: Optional[uuid.UUID]
    metadata_json: Optional[dict[str, Any]]
    created_at: datetime
    updated_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class ContactLeadUpdate(BaseModel):
    """Admin partial update of a lead's workflow status and notes."""

    status: Optional[str] = Field(default=None, max_length=32)
    notes: Optional[str] = Field(default=None, max_length=5000)


class ContactLeadInviteRequest(BaseModel):
    """Invite a lead to become a member; optionally suppress the email."""

    send_email: bool = True


class ContactLeadInviteResponse(BaseModel):
    """Result of inviting a lead: the updated lead plus the created member."""

    lead: ContactLeadResponse
    member_id: uuid.UUID
    member_status: str
+154
View File
@@ -0,0 +1,154 @@
from datetime import datetime
from decimal import Decimal
from typing import Any
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field, field_validator
# Shape shared by experiment and variant keys: lowercase slug, 3-64 chars.
KEY_PATTERN = r"^[a-z0-9_]{3,64}$"
# Client-generated session identifiers: URL-safe, 8-128 chars.
SESSION_PATTERN = r"^[A-Za-z0-9_-]{8,128}$"
def _validate_metadata(metadata: dict[str, Any] | None) -> dict[str, Any] | None:
    """Validate and shallow-copy an event metadata mapping.

    Enforces at most 20 entries, string keys of at most 48 characters, and
    scalar JSON values (str/int/float/bool/None). Returns None unchanged.

    Raises:
        ValueError: on any violated constraint.
    """
    if metadata is None:
        return None
    if len(metadata) > 20:
        raise ValueError("metadata must contain at most 20 keys")
    validated: dict[str, Any] = {}
    for name, item in metadata.items():
        if not (isinstance(name, str) and len(name) <= 48):
            raise ValueError("metadata keys must be strings up to 48 characters")
        if item is not None and not isinstance(item, (str, int, float, bool)):
            raise ValueError("metadata values must be scalar JSON types")
        validated[name] = item
    return validated
class ExperimentVariantDefinition(BaseModel):
    """One variant of an A/B experiment with its traffic allocation (0-100)."""

    variant_key: str = Field(..., pattern=KEY_PATTERN)
    label: str = Field(..., min_length=1, max_length=120)
    allocation: int = Field(..., ge=0, le=100)
    is_control: bool


class ExperimentDefinitionResponse(BaseModel):
    """Full experiment definition as returned to the admin UI."""

    experiment_key: str = Field(..., pattern=KEY_PATTERN)
    cookie_name: str = Field(..., min_length=3, max_length=96)
    name: str
    description: str | None = None
    enabled: bool
    # Routes (path prefixes) on which the experiment may run.
    eligible_routes: list[str]
    variants: list[ExperimentVariantDefinition]
class ExperimentDefinitionUpdate(BaseModel):
    """Admin payload for creating/updating an experiment definition.

    Validators enforce: cookie names are namespaced with an ``exp_`` prefix,
    routes are absolute and normalised (trailing slash stripped, bare "/"
    preserved), and variants include exactly one control with a positive
    total allocation.
    """

    cookie_name: str = Field(..., min_length=3, max_length=96)
    name: str = Field(..., min_length=1, max_length=120)
    description: str | None = Field(default=None, max_length=512)
    enabled: bool
    eligible_routes: list[str] = Field(default_factory=list, min_length=1)
    variants: list[ExperimentVariantDefinition] = Field(..., min_length=2)

    @field_validator("cookie_name")
    @classmethod
    def validate_cookie_name(cls, value: str) -> str:
        # Namespace experiment cookies so they are identifiable client-side.
        if not value.startswith("exp_"):
            raise ValueError("cookie_name must start with 'exp_'")
        return value

    @field_validator("eligible_routes")
    @classmethod
    def validate_routes(cls, value: list[str]) -> list[str]:
        normalized: list[str] = []
        for route in value:
            if not route.startswith("/"):
                raise ValueError("eligible routes must start with '/'")
            # Strip trailing slashes; "/" alone would strip to "", so restore it.
            normalized.append(route.rstrip("/") or "/")
        return normalized

    @field_validator("variants")
    @classmethod
    def validate_variants(cls, value: list[ExperimentVariantDefinition]) -> list[ExperimentVariantDefinition]:
        if sum(1 for item in value if item.is_control) != 1:
            raise ValueError("exactly one control variant is required")
        if sum(item.allocation for item in value) <= 0:
            raise ValueError("variant allocation total must be greater than zero")
        return value
class ExperimentEventBase(BaseModel):
    """Common fields for all client-reported experiment events.

    Subclasses pin ``event_name`` to the allowed value(s) for their event
    type. Metadata is constrained to a small scalar-only mapping.
    """

    experiment_key: str = Field(..., pattern=KEY_PATTERN)
    variant_key: str = Field(..., pattern=KEY_PATTERN)
    session_id: str = Field(..., pattern=SESSION_PATTERN)
    user_id: str | None = Field(None, max_length=64)
    path: str = Field(..., min_length=1, max_length=255)
    timestamp: datetime
    metadata: dict[str, Any] | None = None

    @field_validator("path")
    @classmethod
    def validate_path(cls, value: str) -> str:
        if not value.startswith("/"):
            raise ValueError("path must start with '/'")
        return value

    @field_validator("metadata")
    @classmethod
    def validate_metadata(cls, value: dict[str, Any] | None) -> dict[str, Any] | None:
        # Delegate to the module-level helper (<=20 keys, scalar values only).
        return _validate_metadata(value)
class ExperimentImpressionCreate(ExperimentEventBase):
    """An impression event; event_name is fixed to "impression"."""

    event_name: str = Field(default="impression", pattern=r"^impression$")


class ExperimentEventCreate(ExperimentEventBase):
    """An interaction event: cta_click, form_start, or form_submit."""

    event_name: str = Field(..., pattern=r"^(cta_click|form_start|form_submit)$")


class ExperimentConversionCreate(ExperimentEventBase):
    """A conversion event with an optional monetary value (2 dp, <=12 digits)."""

    event_name: str = Field(default="conversion", pattern=r"^conversion$")
    conversion_value: Decimal | None = Field(default=None, max_digits=12, decimal_places=2)
class ExperimentEventResponse(BaseModel):
    """A stored experiment event as returned by the API."""

    id: UUID
    experiment_key: str
    variant_key: str
    session_id: str
    user_id: str | None = None
    path: str
    event_type: str
    conversion_value: Decimal | None = None
    metadata: dict[str, Any] | None = None
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = ConfigDict(from_attributes=True)


class ExperimentVariantResult(BaseModel):
    """Aggregated funnel metrics for one variant."""

    variant_key: str
    impressions: int
    cta_clicks: int
    form_starts: int
    form_submits: int
    conversions: int
    unique_sessions: int
    conversion_rate: float
    conversion_value_total: float


class ExperimentResult(BaseModel):
    """Per-variant results for one experiment at a point in time."""

    experiment_key: str
    generated_at: datetime
    variants: list[ExperimentVariantResult]


class ExperimentIngestResponse(BaseModel):
    """Ack for an ingested event; `accepted` reflects whether it was kept."""

    ok: bool
    accepted: bool
+370
View File
@@ -0,0 +1,370 @@
import uuid
from datetime import datetime
from typing import Optional, Any
from pydantic import BaseModel
# ── Magic link ─────────────────────────────────────────────────────────────────
class MagicLinkVerifySchema(BaseModel):
    """Token payload for verifying a magic-link sign-in."""

    token: str


# ── Claim ──────────────────────────────────────────────────────────────────────
class ClaimRequestSchema(BaseModel):
    """Start an account claim: identifies the member record by email."""

    email: str


class ClaimCompleteSchema(BaseModel):
    """Complete a claim: emailed code plus the member's chosen password."""

    email: str
    code: str
    password: str


class MemberClaimVerifyCodeSchema(BaseModel):
    """Claim verification without an email field (member already identified)."""

    code: str
    password: str


# ── Auth ───────────────────────────────────────────────────────────────────────
class MemberLoginSchema(BaseModel):
    """Member email/password login request."""

    email: str
    password: str


class MemberLoginVerifySchema(BaseModel):
    """Login verification step: email plus a one-time code."""

    email: str
    code: str


class MemberTokenResponse(BaseModel):
    """Bearer token pair issued after successful member authentication."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"


class MemberRefreshSchema(BaseModel):
    """Refresh-token exchange request."""

    refresh_token: str


class MemberLogoutSchema(BaseModel):
    """Logout request with an optional refresh token."""

    refresh_token: Optional[str] = None
# ── Profile ────────────────────────────────────────────────────────────────────
class MemberProfileResponse(BaseModel):
    """Member profile as shown in the member portal."""

    id: uuid.UUID
    email: str
    first_name: str
    last_name: str
    phone: Optional[str]
    address: Optional[str]
    emergency_contact: Optional[str]
    notifications_enabled: bool
    is_claimed: bool
    member_status: str
    activated_at: Optional[datetime]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class MemberProfileUpdate(BaseModel):
    """Partial self-service profile update; omitted fields are unchanged."""

    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    notifications_enabled: Optional[bool] = None


class MemberOnboardingResponse(BaseModel):
    """Profile plus onboarding/contract progress for the onboarding flow."""

    id: uuid.UUID
    email: str
    first_name: str
    last_name: str
    phone: Optional[str]
    address: Optional[str]
    emergency_contact: Optional[str]
    notifications_enabled: bool
    # Free-form onboarding answers; shape is owned by the frontend wizard.
    onboarding_data: Optional[Any]
    is_claimed: bool
    member_status: str
    claimed_at: Optional[datetime]
    onboarding_completed_at: Optional[datetime]
    contract_signed_at: Optional[datetime]
    contract_signer_name: Optional[str]
    contract_version: Optional[str]
    activated_at: Optional[datetime]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class MemberOnboardingUpdate(BaseModel):
    """Partial onboarding update; set complete_onboarding to finish the flow."""

    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    onboarding_data: Optional[Any] = None
    complete_onboarding: bool = False


class ContractSignSchema(BaseModel):
    """Contract signature submission: typed name plus explicit agreement."""

    signer_name: str
    agreed: bool
    contract_version: Optional[str] = None
# ── Walks ──────────────────────────────────────────────────────────────────────
class WalkResponse(BaseModel):
    """A completed walk as shown in the member's history."""

    id: uuid.UUID
    service_type: str
    walked_at: datetime
    duration_minutes: int
    notes: Optional[str]
    recorded_by: Optional[str]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


# ── Bookings ───────────────────────────────────────────────────────────────────
class BookingCreate(BaseModel):
    """Member booking request; date/day/timeslot are all optional hints."""

    service_type: str
    requested_day: Optional[str] = None
    requested_date: Optional[datetime] = None
    requested_timeslot: Optional[str] = None
    notes: Optional[str] = None


class BookingResponse(BaseModel):
    """A booking as shown to the member (includes admin notes)."""

    id: uuid.UUID
    service_type: str
    requested_date: Optional[datetime]
    status: str
    notes: Optional[str]
    admin_notes: Optional[str]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class BookingSlotAvailabilityResponse(BaseModel):
    """Capacity figures for one timeslot on one day."""

    slot: str
    label: str
    booked: int
    capacity: int
    remaining: int
    is_available: bool


class BookingAvailabilityDayResponse(BaseModel):
    """All slot availabilities for a single day."""

    date: str
    label: str
    slots: list[BookingSlotAvailabilityResponse]


class BookingAvailabilityResponse(BaseModel):
    """Availability for the requested day plus suggested alternative days."""

    requested_date: str
    selected: BookingAvailabilityDayResponse
    alternatives: list[BookingAvailabilityDayResponse]


# ── Messages ───────────────────────────────────────────────────────────────────
class MessageResponse(BaseModel):
    """A message in the member inbox; direction defaults to inbound."""

    id: uuid.UUID
    subject: str
    body: str
    sent_by: Optional[str]
    read_at: Optional[datetime]
    created_at: datetime
    direction: str = "inbound"
    reply_to_id: Optional[uuid.UUID] = None

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class MemberReplySchema(BaseModel):
    """Member reply to an existing message (body only)."""

    body: str
# ── Admin: Create Member ───────────────────────────────────────────────────────
class AdminCreateMember(BaseModel):
    """Admin payload for creating a member record ahead of claiming."""

    email: str
    first_name: str
    last_name: str
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    onboarding_data: Optional[Any] = None
    # Per-member pricing that overrides the global service pricing snapshot.
    service_pricing_overrides: Optional[Any] = None
    force_two_factor: Optional[bool] = None


class AdminMemberResponse(BaseModel):
    """Full member record as exposed to the admin UI."""

    id: uuid.UUID
    email: str
    first_name: str
    last_name: str
    phone: Optional[str]
    address: Optional[str]
    emergency_contact: Optional[str]
    notifications_enabled: bool
    onboarding_data: Optional[Any]
    is_claimed: bool
    is_active: bool
    member_status: str
    claimed_at: Optional[datetime]
    onboarding_completed_at: Optional[datetime]
    contract_signed_at: Optional[datetime]
    contract_signer_name: Optional[str]
    contract_version: Optional[str]
    activated_at: Optional[datetime]
    service_pricing_overrides: Optional[Any]
    force_two_factor: Optional[bool]
    created_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class AdminMemberUpdate(BaseModel):
    """Admin partial update of a member; omitted fields are unchanged."""

    first_name: Optional[str] = None
    last_name: Optional[str] = None
    phone: Optional[str] = None
    address: Optional[str] = None
    emergency_contact: Optional[str] = None
    notifications_enabled: Optional[bool] = None
    onboarding_data: Optional[Any] = None
    is_active: Optional[bool] = None
    member_status: Optional[str] = None
    service_pricing_overrides: Optional[Any] = None
    force_two_factor: Optional[bool] = None


class AdminMemberToggleAction(BaseModel):
    """Simple on/off toggle payload used by admin member actions."""

    enabled: bool
class AdminBookingResponse(BaseModel):
    """A booking with denormalised member details for the admin list view."""

    id: uuid.UUID
    member_id: uuid.UUID
    service_type: str
    requested_date: Optional[datetime]
    status: str
    notes: Optional[str]
    admin_notes: Optional[str]
    created_at: datetime
    # Joined fields — populated from the member row, default to None so the
    # model also validates from a bare booking row.
    member_first_name: Optional[str] = None
    member_last_name: Optional[str] = None
    member_email: Optional[str] = None
    member_dog_name: Optional[str] = None
    member_dog_breed: Optional[str] = None

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}


class AdminBookingCreate(BaseModel):
    """Admin-created booking; defaults straight to confirmed."""

    member_id: uuid.UUID
    service_type: str
    requested_date: Optional[datetime] = None
    status: str = "confirmed"
    notes: Optional[str] = None
    admin_notes: Optional[str] = None


class AdminBookingUpdate(BaseModel):
    """Admin partial booking update; omitted fields are unchanged."""

    requested_date: Optional[datetime] = None
    status: Optional[str] = None  # pending | confirmed | cancelled | completed
    notes: Optional[str] = None
    admin_notes: Optional[str] = None


# ── Admin: Record Walk ─────────────────────────────────────────────────────────
class AdminRecordWalk(BaseModel):
    """Admin payload for logging a completed walk against a member."""

    member_id: uuid.UUID
    walked_at: datetime
    service_type: str
    duration_minutes: int = 60
    notes: Optional[str] = None
# ── Admin: Send Message ────────────────────────────────────────────────────────
class AdminSendMessage(BaseModel):
    """Admin payload for sending a message to a member."""

    member_id: uuid.UUID
    subject: str
    body: str


class AdminNotificationSettingsResponse(BaseModel):
    """Current automatic-notification configuration."""

    automatic_member_notifications_enabled: bool
    nz_public_holiday_notifications_enabled: bool
    invoice_reminder_notifications_enabled: bool
    # Day invoices are sent — presumably 0/1-indexed weekday; confirm against
    # the service that consumes it.
    invoice_day_of_week: int


class AdminNotificationSettingsUpdate(BaseModel):
    """Partial update of notification settings; omitted fields are unchanged."""

    automatic_member_notifications_enabled: Optional[bool] = None
    nz_public_holiday_notifications_enabled: Optional[bool] = None
    invoice_reminder_notifications_enabled: Optional[bool] = None
    invoice_day_of_week: Optional[int] = None


class AdminNotificationRunResponse(BaseModel):
    """Result counts from a manual/automatic notification run."""

    automatic_member_notifications_enabled: bool
    public_holiday_messages_sent: int
    invoice_reminders_sent: int


class AdminNotificationFeedItemResponse(BaseModel):
    """One entry in the admin notification feed, with a link target."""

    id: str
    type: str
    title: str
    description: str
    created_at: datetime
    href: str


class AdminNotificationsResponse(BaseModel):
    """Notification feed page plus the current settings snapshot."""

    items: list[AdminNotificationFeedItemResponse]
    total: int
    settings: AdminNotificationSettingsResponse


class AdminMessageHistoryResponse(BaseModel):
    """A sent message with denormalised member name/email for admin history."""

    # NOTE(review): unlike the other response models here this one has no
    # from_attributes config — presumably built from dicts/joined rows by hand.
    id: uuid.UUID
    member_id: uuid.UUID
    member_name: str
    member_email: str
    subject: str
    body: str
    sent_by: Optional[str]
    created_at: datetime
    read_at: Optional[datetime]
# ── Contract ───────────────────────────────────────────────────────────────────
class ContractResponse(BaseModel):
    """Contract view for a member: onboarding answers plus signing state."""

    onboarding_data: Optional[Any]
    member_name: str
    email: str
    member_status: str
    contract_signed_at: Optional[datetime]
    contract_signer_name: Optional[str]
    contract_version: Optional[str]
    activated_at: Optional[datetime]
    joined_at: datetime

    # Allow construction directly from ORM rows.
    model_config = {"from_attributes": True}
+36
View File
@@ -0,0 +1,36 @@
import uuid
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, ConfigDict
class PageBase(BaseModel):
title: str
slug: str
body: str = ""
meta_title: Optional[str] = None
meta_description: Optional[str] = None
og_image_url: Optional[str] = None
published: bool = False
class PageCreate(PageBase):
pass
class PageUpdate(BaseModel):
title: Optional[str] = None
slug: Optional[str] = None
body: Optional[str] = None
meta_title: Optional[str] = None
meta_description: Optional[str] = None
og_image_url: Optional[str] = None
published: Optional[bool] = None
class PageResponse(PageBase):
id: uuid.UUID
created_at: datetime
updated_at: datetime
model_config = ConfigDict(from_attributes=True)
+46
View File
@@ -0,0 +1,46 @@
import uuid
from datetime import datetime
from typing import Optional, List
from pydantic import BaseModel, ConfigDict
class PostBase(BaseModel):
title: str
slug: str
excerpt: Optional[str] = None
body: str = ""
author: Optional[str] = None
featured_image_url: Optional[str] = None
tags: List[str] = []
published: bool = False
class PostCreate(PostBase):
pass
class PostUpdate(BaseModel):
title: Optional[str] = None
slug: Optional[str] = None
excerpt: Optional[str] = None
body: Optional[str] = None
author: Optional[str] = None
featured_image_url: Optional[str] = None
tags: Optional[List[str]] = None
published: Optional[bool] = None
class PostResponse(PostBase):
id: uuid.UUID
created_at: datetime
updated_at: datetime
model_config = ConfigDict(from_attributes=True)
class PaginatedPostsResponse(BaseModel):
items: List[PostResponse]
total: int
page: int
per_page: int
total_pages: int
+91
View File
@@ -0,0 +1,91 @@
import uuid
from datetime import datetime
from typing import Optional, Dict, Any
from pydantic import BaseModel, ConfigDict, Field
from app.services.pricing import default_service_pricing
class SiteSettingsBase(BaseModel):
site_name: str = ""
tagline: Optional[str] = None
logo_url: Optional[str] = None
footer_text: Optional[str] = None
social_links: Dict[str, Any] = {}
automatic_member_notifications_enabled: bool = True
nz_public_holiday_notifications_enabled: bool = True
invoice_reminder_notifications_enabled: bool = True
invoice_day_of_week: int = 1
bookings_enabled: bool = True
walks_enabled: bool = True
messages_enabled: bool = True
two_factor_enabled: bool = True
audit_history_enabled: bool = True
experiments_enabled: bool = True
class SiteSettingsUpdate(BaseModel):
site_name: Optional[str] = None
tagline: Optional[str] = None
logo_url: Optional[str] = None
footer_text: Optional[str] = None
social_links: Optional[Dict[str, Any]] = None
automatic_member_notifications_enabled: Optional[bool] = None
nz_public_holiday_notifications_enabled: Optional[bool] = None
invoice_reminder_notifications_enabled: Optional[bool] = None
invoice_day_of_week: Optional[int] = None
bookings_enabled: Optional[bool] = None
walks_enabled: Optional[bool] = None
messages_enabled: Optional[bool] = None
two_factor_enabled: Optional[bool] = None
audit_history_enabled: Optional[bool] = None
experiments_enabled: Optional[bool] = None
class FeatureSettingsBase(BaseModel):
bookings_enabled: bool = True
walks_enabled: bool = True
messages_enabled: bool = True
two_factor_enabled: bool = True
audit_history_enabled: bool = True
experiments_enabled: bool = True
class FeatureSettingsUpdate(BaseModel):
bookings_enabled: Optional[bool] = None
walks_enabled: Optional[bool] = None
messages_enabled: Optional[bool] = None
two_factor_enabled: Optional[bool] = None
audit_history_enabled: Optional[bool] = None
experiments_enabled: Optional[bool] = None
class FeatureSettingsResponse(FeatureSettingsBase):
pass
class ServicePricingSettingsResponse(BaseModel):
service_pricing: Dict[str, Any] = Field(default_factory=default_service_pricing)
class ServicePricingSettingsUpdate(BaseModel):
service_pricing: Dict[str, Any]
class PlannerWeatherDay(BaseModel):
code: int
max: int
min: int
class PlannerWeatherResponse(BaseModel):
fetched_at: datetime
weather: Dict[str, PlannerWeatherDay]
class SiteSettingsResponse(SiteSettingsBase):
id: uuid.UUID
created_at: datetime
updated_at: datetime
model_config = ConfigDict(from_attributes=True)
View File
+393
View File
@@ -0,0 +1,393 @@
from datetime import date, timedelta
from sqlalchemy import Date, case, cast, func, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.analytics import AnalyticsEvent
from app.models.member import Booking, Member
from app.schemas.analytics import EventCreate
async def record_event(
    db: AsyncSession,
    data: EventCreate,
    ip_hash: str | None,
    ip_partial: str | None = None,
    user_agent: str | None = None,
    browser: str | None = None,
    os_name: str | None = None,
    country: str | None = None,
    city: str | None = None,
) -> AnalyticsEvent:
    """Insert a new analytics event and return it.

    *data* carries the client-supplied fields; the remaining parameters are
    enrichment values supplied by the caller (hashed/partial IP, parsed
    user agent, geo fields). Commits the session and refreshes the row so
    DB-generated columns (id, created_at) are populated on return.
    """
    event = AnalyticsEvent(
        event_type=data.event_type,
        page=data.page,
        element=data.element,
        # Trailing underscore because "metadata" is reserved on SQLAlchemy
        # declarative classes — presumably mapped to a "metadata" column;
        # confirm against app.models.analytics.
        metadata_=data.metadata,
        session_id=data.session_id,
        ip_hash=ip_hash,
        ip_partial=ip_partial,
        user_agent=user_agent,
        browser=browser,
        os_name=os_name,
        country=country,
        city=city,
    )
    db.add(event)
    await db.commit()
    await db.refresh(event)
    return event
async def get_summary(db: AsyncSession) -> dict:
    """Return all summary data needed for AnalyticsSummary.

    Runs a sequence of aggregate queries (day counts, top-N group-bys, a
    7-day daily series, recent events) plus one in-Python pass that derives
    page-to-page "journeys" from page_view events ordered per session.
    Returned keys mirror the AnalyticsSummary schema fields.
    """
    today = date.today()
    yesterday = today - timedelta(days=1)
    week_ago = today - timedelta(days=6)
    # Compare on calendar date rather than the full timestamp.
    date_col = cast(AnalyticsEvent.created_at, Date)
    # Total events today
    result = await db.execute(
        select(func.count()).select_from(AnalyticsEvent).where(date_col == today)
    )
    total_events_today = result.scalar_one()
    # Total events yesterday
    result = await db.execute(
        select(func.count()).select_from(AnalyticsEvent).where(date_col == yesterday)
    )
    total_events_yesterday = result.scalar_one()
    # Page views today
    result = await db.execute(
        select(func.count())
        .select_from(AnalyticsEvent)
        .where(date_col == today)
        .where(AnalyticsEvent.event_type == "page_view")
    )
    page_views_today = result.scalar_one()
    # Unique sessions today
    result = await db.execute(
        select(func.count(AnalyticsEvent.session_id.distinct()))
        .select_from(AnalyticsEvent)
        .where(date_col == today)
    )
    unique_sessions_today = result.scalar_one()
    # Unique sessions total
    result = await db.execute(
        select(func.count(AnalyticsEvent.session_id.distinct())).select_from(AnalyticsEvent)
    )
    unique_sessions_total = result.scalar_one()
    # Total events all time
    result = await db.execute(
        select(func.count()).select_from(AnalyticsEvent)
    )
    total_events_all_time = result.scalar_one()
    # Events by type (top 10, all time)
    result = await db.execute(
        select(AnalyticsEvent.event_type, func.count().label("cnt"))
        .group_by(AnalyticsEvent.event_type)
        .order_by(func.count().desc())
        .limit(10)
    )
    events_by_type = [{"label": r.event_type, "count": r.cnt} for r in result.all()]
    # Top pages (page_view events, top 10)
    result = await db.execute(
        select(AnalyticsEvent.page, func.count().label("cnt"))
        .where(AnalyticsEvent.event_type == "page_view")
        .group_by(AnalyticsEvent.page)
        .order_by(func.count().desc())
        .limit(10)
    )
    top_pages = [{"label": r.page, "count": r.cnt} for r in result.all()]
    # Top elements (non page_view, top 10)
    result = await db.execute(
        select(AnalyticsEvent.element, func.count().label("cnt"))
        .where(AnalyticsEvent.event_type != "page_view")
        .where(AnalyticsEvent.element.isnot(None))
        .group_by(AnalyticsEvent.element)
        .order_by(func.count().desc())
        .limit(10)
    )
    top_elements = [{"label": r.element, "count": r.cnt} for r in result.all()]
    # Top journeys (page-to-page flows derived from page_view events per session)
    # NOTE(review): this pulls every page_view row into memory; fine at small
    # scale, but consider a windowed SQL query if event volume grows.
    result = await db.execute(
        select(
            AnalyticsEvent.session_id,
            AnalyticsEvent.page,
        )
        .where(AnalyticsEvent.event_type == "page_view")
        .order_by(AnalyticsEvent.session_id, AnalyticsEvent.created_at, AnalyticsEvent.id)
    )
    journey_counts: dict[str, int] = {}
    current_session = None
    previous_page = None
    for row in result.all():
        # Reset the page chain at each session boundary.
        if row.session_id != current_session:
            current_session = row.session_id
            previous_page = None
        # Skip consecutive duplicates of the same page within a session.
        if row.page == previous_page:
            continue
        if previous_page is not None:
            journey = f"{previous_page} -> {row.page}"
            journey_counts[journey] = journey_counts.get(journey, 0) + 1
        previous_page = row.page
    # Sort by count desc, then label asc for a deterministic tie-break.
    top_journeys = [
        {"label": label, "count": count}
        for label, count in sorted(
            journey_counts.items(),
            key=lambda item: (-item[1], item[0]),
        )[:10]
    ]
    # Top browsers
    result = await db.execute(
        select(AnalyticsEvent.browser, func.count().label("cnt"))
        .where(AnalyticsEvent.browser.isnot(None))
        .group_by(AnalyticsEvent.browser)
        .order_by(func.count().desc())
        .limit(8)
    )
    top_browsers = [{"label": r.browser, "count": r.cnt} for r in result.all()]
    # Top OS
    result = await db.execute(
        select(AnalyticsEvent.os_name, func.count().label("cnt"))
        .where(AnalyticsEvent.os_name.isnot(None))
        .group_by(AnalyticsEvent.os_name)
        .order_by(func.count().desc())
        .limit(8)
    )
    top_os = [{"label": r.os_name, "count": r.cnt} for r in result.all()]
    # Top countries
    result = await db.execute(
        select(AnalyticsEvent.country, func.count().label("cnt"))
        .where(AnalyticsEvent.country.isnot(None))
        .group_by(AnalyticsEvent.country)
        .order_by(func.count().desc())
        .limit(8)
    )
    top_countries = [{"label": r.country, "count": r.cnt} for r in result.all()]
    # Last 7 days counts
    result = await db.execute(
        select(date_col.label("day"), func.count().label("cnt"))
        .where(date_col >= week_ago)
        .group_by(date_col)
        .order_by(date_col)
    )
    days = {str(r.day): r.cnt for r in result.all()}
    # Fill gaps so the series always has exactly 7 points, oldest first.
    last_7 = []
    for i in range(6, -1, -1):
        d = str(today - timedelta(days=i))
        last_7.append({"date": d, "count": days.get(d, 0)})
    # Recent events (last 30)
    result = await db.execute(
        select(AnalyticsEvent)
        .order_by(AnalyticsEvent.created_at.desc())
        .limit(30)
    )
    recent = list(result.scalars().all())
    return {
        "total_events_today": total_events_today,
        "total_events_yesterday": total_events_yesterday,
        "page_views_today": page_views_today,
        "unique_sessions_today": unique_sessions_today,
        "unique_sessions_total": unique_sessions_total,
        "total_events_all_time": total_events_all_time,
        "events_by_type": events_by_type,
        "top_pages": top_pages,
        "top_elements": top_elements,
        "top_journeys": top_journeys,
        "top_browsers": top_browsers,
        "top_os": top_os,
        "top_countries": top_countries,
        "events_last_7_days": last_7,
        "recent_events": recent,
    }
async def get_booking_operations_summary(db: AsyncSession) -> dict:
    """Return booking operations reporting for the admin Reporting page.

    Aggregates from the Booking table:
      * live totals (active bookings, forward load over the next 14 days),
      * 30-day activity (bookings created, bookings cancelled),
      * a per-day forward load split into AM/PM,
      * the customers holding the most upcoming bookings.
    """
    # NOTE(review): date.today() is the server-local calendar date — confirm
    # the host timezone matches the intended (NZ) reporting day boundaries.
    today = date.today()
    activity_start = today - timedelta(days=29)  # trailing 30-day window, inclusive of today
    forward_load_end = today + timedelta(days=13)  # next 14 days, inclusive of today
    # Date-only projections of datetime columns so filters/grouping work per calendar day.
    created_date_col = cast(Booking.created_at, Date)
    updated_date_col = cast(Booking.updated_at, Date)
    requested_date_col = cast(Booking.requested_date, Date)
    active_statuses = ("pending", "confirmed", "completed")
    forward_statuses = ("pending", "confirmed")
    # Total bookings in any non-cancelled state.
    active_total_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(Booking.status.in_(active_statuses))
    )
    active_bookings_total = active_total_result.scalar_one()
    # Upcoming (pending/confirmed) bookings falling inside the 14-day window.
    forward_load_total_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .where(requested_date_col <= forward_load_end)
    )
    forward_load_total = forward_load_total_result.scalar_one()
    # Bookings created in the trailing 30 days (any status).
    booked_last_30_days_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(created_date_col >= activity_start)
        .where(created_date_col <= today)
    )
    booked_last_30_days = booked_last_30_days_result.scalar_one()
    # Cancellations in the trailing 30 days. NOTE(review): updated_at is used
    # as a proxy for the cancellation time — assumes cancelling is the last
    # update to the row; confirm no later edits are possible.
    cancellations_last_30_days_result = await db.execute(
        select(func.count())
        .select_from(Booking)
        .where(Booking.status == "cancelled")
        .where(updated_date_col >= activity_start)
        .where(updated_date_col <= today)
    )
    cancellations_last_30_days = cancellations_last_30_days_result.scalar_one()
    # Members holding >= 3 upcoming bookings; the number of grouped rows is
    # the number of such members.
    high_volume_result = await db.execute(
        select(func.count().label("booking_count"))
        .select_from(Booking)
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .group_by(Booking.member_id)
        .having(func.count() >= 3)
    )
    high_volume_customer_count = len(high_volume_result.all())
    # Per-day forward load, split AM/PM by the hour of requested_date.
    # Assumes requested_date carries a time-of-day component — TODO confirm.
    forward_load_result = await db.execute(
        select(
            requested_date_col.label("day"),
            func.count().label("total"),
            func.sum(
                case(
                    (func.extract("hour", Booking.requested_date) < 12, 1),
                    else_=0,
                )
            ).label("am"),
            func.sum(
                case(
                    (func.extract("hour", Booking.requested_date) >= 12, 1),
                    else_=0,
                )
            ).label("pm"),
        )
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .where(requested_date_col <= forward_load_end)
        .group_by(requested_date_col)
        .order_by(requested_date_col)
    )
    forward_load_by_day = {
        str(row.day): {
            "total": int(row.total or 0),
            "am": int(row.am or 0),
            "pm": int(row.pm or 0),
        }
        for row in forward_load_result.all()
    }
    # Densify: emit one entry per day so days with no bookings appear as zeros.
    forward_load_next_14_days = []
    for offset in range(14):
        current_day = str(today + timedelta(days=offset))
        values = forward_load_by_day.get(current_day, {"total": 0, "am": 0, "pm": 0})
        forward_load_next_14_days.append({
            "date": current_day,
            "total": values["total"],
            "am": values["am"],
            "pm": values["pm"],
        })
    # Daily created-booking counts for the 30-day activity chart.
    booked_activity_result = await db.execute(
        select(created_date_col.label("day"), func.count().label("count"))
        .where(created_date_col >= activity_start)
        .where(created_date_col <= today)
        .group_by(created_date_col)
        .order_by(created_date_col)
    )
    booked_by_day = {str(row.day): int(row.count or 0) for row in booked_activity_result.all()}
    # Daily cancellation counts (same updated_at proxy as above).
    cancellation_activity_result = await db.execute(
        select(updated_date_col.label("day"), func.count().label("count"))
        .where(Booking.status == "cancelled")
        .where(updated_date_col >= activity_start)
        .where(updated_date_col <= today)
        .group_by(updated_date_col)
        .order_by(updated_date_col)
    )
    cancellations_by_day = {
        str(row.day): int(row.count or 0)
        for row in cancellation_activity_result.all()
    }
    # Densify the 30-day activity series.
    activity_last_30_days = []
    for offset in range(30):
        current_day = str(activity_start + timedelta(days=offset))
        activity_last_30_days.append({
            "date": current_day,
            "booked": booked_by_day.get(current_day, 0),
            "cancellations": cancellations_by_day.get(current_day, 0),
        })
    # Top customers by number of upcoming bookings (ties broken by name).
    volume_result = await db.execute(
        select(
            Member.first_name,
            Member.last_name,
            func.count(Booking.id).label("count"),
        )
        .join(Member, Booking.member_id == Member.id)
        .where(Booking.status.in_(forward_statuses))
        .where(Booking.requested_date.is_not(None))
        .where(requested_date_col >= today)
        .group_by(Member.id, Member.first_name, Member.last_name)
        .order_by(func.count(Booking.id).desc(), Member.first_name.asc(), Member.last_name.asc())
        .limit(8)
    )
    top_high_volume_customers = [
        {
            # Join non-empty name parts; fall back to "Client" when both are blank.
            "label": " ".join(part for part in [row.first_name, row.last_name] if part).strip() or "Client",
            "count": int(row.count or 0),
        }
        for row in volume_result.all()
    ]
    return {
        "active_bookings_total": int(active_bookings_total or 0),
        "forward_load_total": int(forward_load_total or 0),
        "booked_last_30_days": int(booked_last_30_days or 0),
        "cancellations_last_30_days": int(cancellations_last_30_days or 0),
        "high_volume_customer_count": int(high_volume_customer_count or 0),
        "forward_load_next_14_days": forward_load_next_14_days,
        "activity_last_30_days": activity_last_30_days,
        "top_high_volume_customers": top_high_volume_customers,
    }
+56
View File
@@ -0,0 +1,56 @@
"""
Audit logging service.
Call `log_audit(db, ...)` from within any request handler that already holds
an open AsyncSession. The entry is added to the session — it will be
committed with the surrounding transaction.
For error logging outside a request session (e.g. exception middleware), open
a fresh session via `AsyncSessionLocal`, call `log_audit`, then `commit`.
"""
import uuid
from datetime import datetime, timezone
from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.audit import AuditLog
from app.services.settings import get_feature_settings_snapshot
async def log_audit(
    db: AsyncSession,
    *,
    action_type: str,
    area: str,
    description: str,
    member_id: Optional[uuid.UUID] = None,
    member_email: Optional[str] = None,
    status: str = "success",
    booking_id: Optional[uuid.UUID] = None,
    error_message: Optional[str] = None,
    error_detail: Optional[str] = None,
    ip_address: Optional[str] = None,
    user_agent: Optional[str] = None,
    extra: Optional[dict] = None,
) -> None:
    """Append an audit-log row to the session.

    The row is committed together with the caller's surrounding transaction.
    Does nothing when the audit-history feature flag is disabled.
    """
    features = await get_feature_settings_snapshot(db)
    if not features.audit_history_enabled:
        return
    db.add(
        AuditLog(
            timestamp=datetime.now(timezone.utc),
            member_id=member_id,
            member_email=member_email,
            action_type=action_type,
            area=area,
            description=description,
            status=status,
            booking_id=booking_id,
            error_message=error_message,
            error_detail=error_detail,
            ip_address=ip_address,
            user_agent=user_agent,
            extra=extra,
        )
    )
+166
View File
@@ -0,0 +1,166 @@
"""
Email sending service.
In development (SMTP_HOST unset or EMAIL_BACKEND=console), codes are printed to
stdout instead of being sent. In production set:
SMTP_HOST, SMTP_PORT, SMTP_USER, SMTP_PASSWORD, EMAIL_FROM
"""
import asyncio
import smtplib
import ssl
import logging
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from functools import partial
from app.config import settings
logger = logging.getLogger(__name__)
def _send_smtp_blocking(to_address: str, subject: str, html_body: str, text_body: str) -> None:
    """Synchronously deliver a multipart (plain + HTML) email over SMTP.

    Blocking: run via a thread/executor from async code. Uses STARTTLS and
    authenticates only when the relevant settings are present.
    """
    # Assemble the message up-front; the connection is only needed to send.
    message = MIMEMultipart("alternative")
    message["Subject"] = subject
    message["From"] = settings.EMAIL_FROM
    message["To"] = to_address
    message.attach(MIMEText(text_body, "plain"))
    message.attach(MIMEText(html_body, "html"))
    tls_context = ssl.create_default_context()
    with smtplib.SMTP(settings.SMTP_HOST, settings.SMTP_PORT) as server:
        if settings.SMTP_USE_TLS:
            server.starttls(context=tls_context)
        if settings.SMTP_USER and settings.SMTP_PASSWORD:
            server.login(settings.SMTP_USER, settings.SMTP_PASSWORD)
        server.sendmail(settings.EMAIL_FROM, to_address, message.as_string())
async def send_email(to_address: str, subject: str, html_body: str, text_body: str) -> None:
    """Send an email, or echo it to the console in development.

    When EMAIL_BACKEND is "console" or SMTP_HOST is unset, the message is
    logged and printed instead of being sent. Otherwise the blocking smtplib
    delivery is off-loaded to a worker thread so the event loop stays free.
    """
    if settings.EMAIL_BACKEND == "console" or not settings.SMTP_HOST:
        logger.info(
            "\n%s\nTO: %s\nSUBJECT: %s\n%s\n%s",
            "=" * 60,
            to_address,
            subject,
            text_body,
            "=" * 60,
        )
        # Printed as well so codes are visible even without logging configured.
        print(f"\n{'='*60}\nEMAIL TO: {to_address}\nSUBJECT: {subject}\n{text_body}\n{'='*60}\n")
        return
    # asyncio.to_thread replaces the deprecated get_event_loop() +
    # run_in_executor(partial(...)) pattern; it always targets the running loop.
    await asyncio.to_thread(_send_smtp_blocking, to_address, subject, html_body, text_body)
# ── Template helpers ───────────────────────────────────────────────────────────
def _base_html(content: str) -> str:
    """Wrap *content* (an HTML fragment) in the branded Goodwalk email shell.

    Returns a complete standalone HTML document with inline CSS, header and
    footer; callers supply only the inner body markup (e.g. a .body div).
    """
    return f"""<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body {{ font-family: 'Readex Pro', Arial, sans-serif; background: #FBFBFB; margin: 0; padding: 0; }}
.container {{ max-width: 560px; margin: 40px auto; background: #fff; border-radius: 16px;
overflow: hidden; box-shadow: 0 4px 24px rgba(0,40,66,.10); }}
.header {{ background: #002842; padding: 32px 40px; text-align: center; }}
.header h1 {{ color: #FFD100; font-family: 'Fredoka One', Arial, sans-serif;
font-size: 28px; margin: 0; letter-spacing: .5px; }}
.header p {{ color: #E5EEFF; margin: 6px 0 0; font-size: 14px; }}
.body {{ padding: 36px 40px; color: #2E3031; }}
.body p {{ line-height: 1.6; margin: 0 0 16px; }}
.code-box {{ background: #E5EEFF; border-radius: 12px; padding: 20px;
text-align: center; margin: 24px 0; }}
.code {{ font-size: 36px; font-weight: 700; letter-spacing: 10px; color: #002842;
font-family: 'Fredoka One', monospace; }}
.footer {{ background: #F4F6FB; padding: 20px 40px; text-align: center;
font-size: 12px; color: #888; border-top: 1px solid #E5EEFF; }}
.expiry {{ color: #888; font-size: 13px; }}
</style>
</head>
<body>
<div class="container">
<div class="header">
<h1>🐾 Goodwalk</h1>
<p>Auckland's favourite dog walking service</p>
</div>
{content}
<div class="footer">
<p>Goodwalk &mdash; Auckland, New Zealand<br>
<a href="mailto:info@goodwalk.co.nz" style="color:#FFD100;">info@goodwalk.co.nz</a>
</p>
<p>If you didn't request this email, you can safely ignore it.</p>
</div>
</div>
</body>
</html>"""
async def send_claim_code(to_address: str, first_name: str, code: str) -> None:
    """Email the one-time account-claim code (stated validity: 15 minutes)."""
    subject = "Claim your Goodwalk Members Account"
    html_body = _base_html(f"""
<div class="body">
<p>Hi {first_name}!</p>
<p>Welcome to the Goodwalk Members Area. Use the code below to claim your account.
It expires in <strong>15 minutes</strong>.</p>
<div class="code-box">
<div class="code">{code}</div>
</div>
<p class="expiry">This code is valid for 15 minutes and can only be used once.</p>
<p>If you didn't request this, please contact us at
<a href="mailto:info@goodwalk.co.nz">info@goodwalk.co.nz</a>.</p>
</div>""")
    # Plain-text alternative for clients that do not render HTML.
    text_body = (
        f"Hi {first_name},\n\n"
        f"Your Goodwalk account claim code is: {code}\n\n"
        "This code expires in 15 minutes.\n\n"
        "If you didn't request this, please ignore this email."
    )
    await send_email(to_address, subject, html_body, text_body)
async def send_login_2fa(to_address: str, first_name: str, code: str) -> None:
    """Email the one-time 2FA login code (stated validity: 10 minutes)."""
    subject = "Your Goodwalk login code"
    html_body = _base_html(f"""
<div class="body">
<p>Hi {first_name}!</p>
<p>Here's your one-time login code for the Goodwalk Members Area.
It expires in <strong>10 minutes</strong>.</p>
<div class="code-box">
<div class="code">{code}</div>
</div>
<p class="expiry">This code is valid for 10 minutes and can only be used once.</p>
<p>If you didn't try to log in, please contact us immediately at
<a href="mailto:info@goodwalk.co.nz">info@goodwalk.co.nz</a>.</p>
</div>""")
    # Plain-text alternative for clients that do not render HTML.
    text_body = (
        f"Hi {first_name},\n\n"
        f"Your Goodwalk login code is: {code}\n\n"
        "This code expires in 10 minutes.\n\n"
        "If you didn't request this, please contact us immediately."
    )
    await send_email(to_address, subject, html_body, text_body)
async def send_onboarding_invite(to_address: str, first_name: str, magic_url: str) -> None:
    """Email a single-use onboarding magic link (stated validity: 7 days)."""
    subject = "You're invited to complete your Goodwalk onboarding"
    html_body = _base_html(f"""
<div class="body">
<p>Hi {first_name}!</p>
<p>Thanks for getting in touch with Goodwalk. We've opened your onboarding invitation so you can complete your details and sign your service agreement.</p>
<p><a href="{magic_url}" style="display:inline-block;padding:12px 18px;border-radius:12px;background:#FFD100;color:#002842;text-decoration:none;font-weight:700;">Start onboarding &rarr;</a></p>
<p class="expiry">This link is valid for 7 days and can only be used once.</p>
<p>Once your onboarding is complete and your contract is signed, we'll activate your members account.</p>
<p>If you have any questions, reach us at <a href="mailto:info@goodwalk.co.nz">info@goodwalk.co.nz</a>.</p>
</div>""")
    # Plain-text alternative for clients that do not render HTML.
    text_body = (
        f"Hi {first_name},\n\n"
        "We've opened your Goodwalk onboarding invitation.\n\n"
        f"Click this link to get started (valid for 7 days):\n{magic_url}\n\n"
        "Once your onboarding is complete and your contract is signed, we'll activate your members account.\n\n"
        "Questions? Email info@goodwalk.co.nz"
    )
    await send_email(to_address, subject, html_body, text_body)
+251
View File
@@ -0,0 +1,251 @@
from datetime import datetime, timezone
from decimal import Decimal
from sqlalchemy import case, func, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.experiments.registry import EXPERIMENT_REGISTRY
from app.models.experiment import Experiment, ExperimentEvent, ExperimentVariant
from app.schemas.experiments import (
ExperimentConversionCreate,
ExperimentDefinitionResponse,
ExperimentDefinitionUpdate,
ExperimentEventCreate,
ExperimentImpressionCreate,
ExperimentResult,
ExperimentVariantResult,
)
def experiment_exists(experiment_key: str, variant_key: str) -> bool:
    """Return True when the code registry defines this experiment/variant pair."""
    definition = EXPERIMENT_REGISTRY.get(experiment_key)
    if not definition:
        return False
    known_variants = {entry["variant_key"] for entry in definition["variants"]}
    return variant_key in known_variants
async def sync_experiment_registry(db: AsyncSession) -> None:
    """Mirror the code-defined EXPERIMENT_REGISTRY into the database.

    Missing experiments/variants are created; existing variants have their
    label/allocation/is_control refreshed from the registry. Rows present
    only in the DB are left untouched.
    """
    result = await db.execute(
        select(Experiment).options(selectinload(Experiment.variants))
    )
    existing = {experiment.experiment_key: experiment for experiment in result.scalars().all()}
    for definition in EXPERIMENT_REGISTRY.values():
        experiment = existing.get(definition["experiment_key"])
        existing_variants: dict[str, ExperimentVariant] = {}
        if experiment is None:
            experiment = Experiment(
                experiment_key=definition["experiment_key"],
                cookie_name=definition["cookie_name"],
                name=definition["name"],
                description=definition.get("description"),
                enabled=definition["enabled"],
                eligible_routes=definition["eligible_routes"],
            )
            db.add(experiment)
            # Flush so experiment.id is populated for the variant rows below.
            await db.flush()
        else:
            existing_variants = {variant.variant_key: variant for variant in experiment.variants}
        for variant_definition in definition["variants"]:
            variant = existing_variants.get(variant_definition["variant_key"])
            if variant is None:
                db.add(
                    ExperimentVariant(
                        experiment_id=experiment.id,
                        variant_key=variant_definition["variant_key"],
                        label=variant_definition["label"],
                        allocation=variant_definition["allocation"],
                        is_control=variant_definition["is_control"],
                    )
                )
                continue
            # Existing variant: refresh the mutable fields in place.
            variant.label = variant_definition["label"]
            variant.allocation = variant_definition["allocation"]
            variant.is_control = variant_definition["is_control"]
    await db.flush()
async def list_experiment_definitions(db: AsyncSession) -> list[ExperimentDefinitionResponse]:
    """Return all experiments (with variants) ordered by experiment_key."""
    result = await db.execute(
        select(Experiment).options(selectinload(Experiment.variants)).order_by(Experiment.experiment_key)
    )
    responses: list[ExperimentDefinitionResponse] = []
    for experiment in result.scalars().all():
        variant_payloads = [
            {
                "variant_key": variant.variant_key,
                "label": variant.label,
                "allocation": variant.allocation,
                "is_control": variant.is_control,
            }
            for variant in experiment.variants
        ]
        responses.append(
            ExperimentDefinitionResponse(
                experiment_key=experiment.experiment_key,
                cookie_name=experiment.cookie_name,
                name=experiment.name,
                description=experiment.description,
                enabled=experiment.enabled,
                eligible_routes=experiment.eligible_routes,
                variants=variant_payloads,
            )
        )
    return responses
async def record_experiment_event(
    db: AsyncSession,
    payload: ExperimentImpressionCreate | ExperimentEventCreate | ExperimentConversionCreate,
) -> ExperimentEvent:
    """Persist one experiment event (impression, interaction, or conversion).

    Timestamps are normalised to naive UTC before storage; naive inputs are
    treated as already-UTC.
    """
    # Only conversion payloads carry a value; other payload types yield None.
    conversion_value = getattr(payload, "conversion_value", None)
    timestamp = payload.timestamp
    if timestamp.tzinfo is None:
        # NOTE(review): naive timestamps are assumed to be UTC — confirm clients.
        timestamp = timestamp.replace(tzinfo=timezone.utc)
    event = ExperimentEvent(
        experiment_key=payload.experiment_key,
        variant_key=payload.variant_key,
        session_id=payload.session_id,
        user_id=payload.user_id,
        path=payload.path,
        event_type=payload.event_name,
        conversion_value=conversion_value,
        metadata_=payload.metadata,
        # Stored as naive UTC.
        created_at=timestamp.astimezone(timezone.utc).replace(tzinfo=None),
    )
    db.add(event)
    await db.flush()
    # Refresh so server-generated fields (e.g. the primary key) are populated.
    await db.refresh(event)
    return event
async def get_experiment_results(db: AsyncSession, experiment_key: str | None = None) -> list[ExperimentResult]:
    """Aggregate funnel metrics per experiment/variant.

    One grouped query counts each event type, distinct sessions, and summed
    conversion value; results are then grouped per experiment. Pass
    ``experiment_key`` to restrict to a single experiment.
    """
    # One conditional-sum per funnel stage, all in a single grouped query.
    stmt = select(
        ExperimentEvent.experiment_key,
        ExperimentEvent.variant_key,
        func.sum(case((ExperimentEvent.event_type == "impression", 1), else_=0)).label("impressions"),
        func.sum(case((ExperimentEvent.event_type == "cta_click", 1), else_=0)).label("cta_clicks"),
        func.sum(case((ExperimentEvent.event_type == "form_start", 1), else_=0)).label("form_starts"),
        func.sum(case((ExperimentEvent.event_type == "form_submit", 1), else_=0)).label("form_submits"),
        func.sum(case((ExperimentEvent.event_type == "conversion", 1), else_=0)).label("conversions"),
        func.count(func.distinct(ExperimentEvent.session_id)).label("unique_sessions"),
        func.coalesce(func.sum(ExperimentEvent.conversion_value), Decimal("0")).label("conversion_value_total"),
    ).group_by(ExperimentEvent.experiment_key, ExperimentEvent.variant_key).order_by(
        ExperimentEvent.experiment_key,
        ExperimentEvent.variant_key,
    )
    if experiment_key:
        stmt = stmt.where(ExperimentEvent.experiment_key == experiment_key)
    result = await db.execute(stmt)
    rows = result.all()
    grouped: dict[str, list[ExperimentVariantResult]] = {}
    for row in rows:
        impressions = int(row.impressions or 0)
        conversions = int(row.conversions or 0)
        # Conversion rate is conversions per impression; 0.0 when no impressions.
        conversion_rate = conversions / impressions if impressions else 0.0
        grouped.setdefault(row.experiment_key, []).append(
            ExperimentVariantResult(
                variant_key=row.variant_key,
                impressions=impressions,
                cta_clicks=int(row.cta_clicks or 0),
                form_starts=int(row.form_starts or 0),
                form_submits=int(row.form_submits or 0),
                conversions=conversions,
                unique_sessions=int(row.unique_sessions or 0),
                conversion_rate=round(conversion_rate, 4),
                conversion_value_total=float(row.conversion_value_total or 0),
            )
        )
    # Single generation timestamp shared by all returned results.
    generated_at = datetime.now(timezone.utc)
    return [
        ExperimentResult(
            experiment_key=key,
            generated_at=generated_at,
            variants=variants,
        )
        for key, variants in grouped.items()
    ]
async def get_experiment_definition(db: AsyncSession, experiment_key: str) -> Experiment | None:
    """Load one experiment (variants eagerly loaded), or None when absent."""
    stmt = (
        select(Experiment)
        .options(selectinload(Experiment.variants))
        .where(Experiment.experiment_key == experiment_key)
    )
    result = await db.execute(stmt)
    return result.scalars().first()
async def upsert_experiment_definition(
    db: AsyncSession,
    experiment_key: str,
    payload: ExperimentDefinitionUpdate,
) -> Experiment:
    """Create or update an experiment and synchronise its variant set.

    Variants absent from the payload are deleted; existing ones are updated
    in place; new ones are inserted.

    Raises:
        ValueError: if ``payload.cookie_name`` is already used by a
            different experiment.
    """
    experiment = await get_experiment_definition(db, experiment_key)
    # Enforce cookie_name uniqueness across *other* experiments before writing.
    duplicate_cookie = await db.execute(
        select(Experiment).where(
            Experiment.cookie_name == payload.cookie_name,
            Experiment.experiment_key != experiment_key,
        )
    )
    if duplicate_cookie.scalars().first():
        raise ValueError("cookie_name is already used by another experiment")
    if experiment is None:
        experiment = Experiment(
            experiment_key=experiment_key,
            cookie_name=payload.cookie_name,
            name=payload.name,
            description=payload.description,
            enabled=payload.enabled,
            eligible_routes=payload.eligible_routes,
        )
        db.add(experiment)
        # Flush so experiment.id exists for the variant inserts below.
        await db.flush()
        existing_variants: dict[str, ExperimentVariant] = {}
    else:
        experiment.cookie_name = payload.cookie_name
        experiment.name = payload.name
        experiment.description = payload.description
        experiment.enabled = payload.enabled
        experiment.eligible_routes = payload.eligible_routes
        existing_variants = {variant.variant_key: variant for variant in experiment.variants}
    # Delete variants that the payload no longer lists.
    incoming_keys = {variant.variant_key for variant in payload.variants}
    for variant in list(existing_variants.values()):
        if variant.variant_key not in incoming_keys:
            await db.delete(variant)
    for variant_payload in payload.variants:
        variant = existing_variants.get(variant_payload.variant_key)
        if variant is None:
            db.add(
                ExperimentVariant(
                    experiment_id=experiment.id,
                    variant_key=variant_payload.variant_key,
                    label=variant_payload.label,
                    allocation=variant_payload.allocation,
                    is_control=variant_payload.is_control,
                )
            )
            continue
        # Existing variant: refresh the mutable fields in place.
        variant.label = variant_payload.label
        variant.allocation = variant_payload.allocation
        variant.is_control = variant_payload.is_control
    await db.flush()
    # Re-load so the returned object reflects the final variant set.
    refreshed = await get_experiment_definition(db, experiment_key)
    assert refreshed is not None
    return refreshed
+614
View File
@@ -0,0 +1,614 @@
import asyncio
import logging
import re
from copy import deepcopy
from dataclasses import dataclass
from datetime import date, datetime, timedelta, timezone
from zoneinfo import ZoneInfo
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import AsyncSessionLocal
from app.models.member import AdminMessage, Booking, Member, MemberNotificationDispatch, Walk
from app.models.settings import SiteSettings
from app.services.sections import get_section
from app.services.settings import get_feature_settings_snapshot, get_settings
logger = logging.getLogger(__name__)
# All customer-facing dates are rendered in New Zealand local time.
NZ_TZ = ZoneInfo("Pacific/Auckland")
# How often the notification automation loop wakes up, in seconds.
AUTOMATION_INTERVAL_SECONDS = 3600
# Internal service_type codes mapped to customer-facing labels.
SERVICE_LABELS = {
    "pack_walk": "Pack Walk",
    "1_1_walk": "1-1 Walk",
    "puppy_visit": "Puppy Visit",
}
# Sender name used for automation-generated messages.
DEFAULT_SENT_BY = "Goodwalk"
# Fallback invoice weekday index (0 = Monday, so 1 = Tuesday).
DEFAULT_INVOICE_DAY = 1
# Matariki follows the lunar calendar and has no closed-form rule; the
# official gazetted dates are listed explicitly per year.
MATARIKI_DATES = {
    2025: date(2025, 6, 20),
    2026: date(2026, 7, 10),
    2027: date(2027, 6, 25),
    2028: date(2028, 7, 14),
    2029: date(2029, 7, 6),
    2030: date(2030, 6, 21),
    2031: date(2031, 7, 11),
    2032: date(2032, 7, 2),
    2033: date(2033, 6, 24),
    2034: date(2034, 7, 7),
    2035: date(2035, 6, 29),
}
# Settings-section keys under which admins can override the templates below.
AUTOMATIC_NOTIFICATION_SECTION_KEY = "notifications.automaticMessages"
PUBLIC_HOLIDAY_NOTIFICATION_SECTION_KEY = "notifications.publicHolidays"
INVOICE_REMINDER_NOTIFICATION_SECTION_KEY = "notifications.invoiceReminders"
# Matches {{ token }} placeholders in notification templates.
TEMPLATE_TOKEN_PATTERN = re.compile(r"{{\s*([a-zA-Z0-9_]+)\s*}}")
# Default subject/body templates used when no admin override is stored.
DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES = {
    "member_activated": {
        "subject": "Your Goodwalk members account is now active",
        "body": "Your onboarding is complete and your full members area is now ready. You can log in to view bookings, messages, walks, and your contract.",
    },
    "booking_confirmed": {
        "subject": "Your Goodwalk booking has been confirmed",
        "body": "Your {{service_label}} booking for {{requested_date_label}} has been confirmed. If anything changes, we will message you here.",
    },
    "booking_rescheduled": {
        "subject": "Your Goodwalk booking has been rescheduled",
        "body": "Your {{service_label}} has been moved to {{requested_date_label}}. If you have any questions, please get in touch.",
    },
    "booking_cancelled": {
        "subject": "Your Goodwalk booking has been cancelled",
        "body": "Your {{service_label}} booking for {{requested_date_label}} has been cancelled. Please contact us if you would like to arrange another time.",
    },
    "walk_completed": {
        "subject": "Your Goodwalk walk is complete",
        "body": "{{member_first_name}}, your {{service_label}} on {{walked_on_label}} has been marked as complete.{{walk_notes_sentence}}",
    },
}
DEFAULT_PUBLIC_HOLIDAY_NOTIFICATION_TEMPLATE = {
    "subject": "Goodwalk public holiday update: {{holiday_name}}",
    "body": "Today is {{holiday_name}} in New Zealand. If you were expecting service changes or slower replies today, this is why. We will confirm any booking adjustments directly in your messages.",
}
DEFAULT_INVOICE_REMINDER_NOTIFICATION_TEMPLATE = {
    "subject": "Invoice reminder from Goodwalk",
    "body": "A quick reminder that invoices are scheduled to go out on {{weekday_label}}. This week that falls on {{invoice_date_label}}.",
}
@dataclass
class NotificationSettingsSnapshot:
    """Point-in-time copy of the site's notification settings."""

    # Master switch for automatic (system-generated) member messages.
    automatic_member_notifications_enabled: bool = True
    # Whether NZ public-holiday announcements are sent.
    nz_public_holiday_notifications_enabled: bool = True
    # Whether invoice reminder messages are sent.
    invoice_reminder_notifications_enabled: bool = True
    # Weekday invoices go out (0 = Monday).
    invoice_day_of_week: int = DEFAULT_INVOICE_DAY
    # Admin notifications from before this moment are treated as cleared.
    admin_notifications_cleared_before: datetime | None = None
@dataclass
class NotificationRunSummary:
    """Counters produced by one pass of the notification automation loop."""

    # Whether automatic member messages were enabled during the run.
    automatic_member_notifications_enabled: bool
    public_holiday_messages_sent: int = 0
    invoice_reminders_sent: int = 0
@dataclass
class NotificationTemplateSnapshot:
    """Resolved notification templates (admin overrides merged over defaults)."""

    # Per-event {subject, body} templates keyed like DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES.
    automatic_messages: dict[str, dict[str, str]]
    # Single {subject, body} template for public-holiday announcements.
    public_holidays: dict[str, str]
    # Single {subject, body} template for invoice reminders.
    invoice_reminders: dict[str, str]
def _service_label(service_type: str | None) -> str:
    """Human-readable label for a service_type code; unknown codes pass
    through unchanged, and None/empty falls back to "service"."""
    lookup_key = service_type or ""
    return SERVICE_LABELS.get(lookup_key, service_type or "service")
def _weekday_label(weekday: int) -> str:
labels = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
return labels[weekday] if 0 <= weekday < len(labels) else labels[DEFAULT_INVOICE_DAY]
def _format_nz_date(day: datetime | date, *, include_year: bool = False) -> str:
if isinstance(day, datetime):
local_day = day.astimezone(NZ_TZ)
base = local_day.date()
else:
base = day
month = base.strftime("%B")
weekday = base.strftime("%A")
if include_year:
return f"{weekday} {base.day} {month} {base.year}"
return f"{weekday} {base.day} {month}"
def _shift_single_holiday(day: date) -> date:
if day.weekday() == 5:
return day + timedelta(days=2)
if day.weekday() == 6:
return day + timedelta(days=1)
return day
def _observed_pair(day_one: date, day_two: date) -> list[date]:
observed: list[date] = []
for actual in [day_one, day_two]:
candidate = actual
if candidate.weekday() >= 5:
candidate += timedelta(days=7 - candidate.weekday())
while candidate in observed:
candidate += timedelta(days=1)
observed.append(candidate)
return observed
def _nth_weekday(year: int, month: int, weekday: int, occurrence: int) -> date:
first = date(year, month, 1)
offset = (weekday - first.weekday()) % 7
return first + timedelta(days=offset + (occurrence - 1) * 7)
def _easter_sunday(year: int) -> date:
a = year % 19
b = year // 100
c = year % 100
d = b // 4
e = b % 4
f = (b + 8) // 25
g = (b - f + 1) // 3
h = (19 * a + b - d - g + 15) % 30
i = c // 4
k = c % 4
l = (32 + 2 * e + 2 * i - h - k) % 7
m = (a + 11 * h + 22 * l) // 451
month = (h + l - 7 * m + 114) // 31
day = ((h + l - 7 * m + 114) % 31) + 1
return date(year, month, day)
def nz_public_holidays_for_year(year: int) -> dict[date, str]:
    """Map observed date -> holiday name for NZ national holidays in *year*.

    Mondayisation rules apply to single holidays and the New Year/Christmas
    pairs; Matariki is looked up from the gazetted-dates table when known.
    """
    easter = _easter_sunday(year)
    observed_new_year = _observed_pair(date(year, 1, 1), date(year, 1, 2))
    observed_christmas = _observed_pair(date(year, 12, 25), date(year, 12, 26))
    holidays: dict[date, str] = {}
    holidays[observed_new_year[0]] = "New Year's Day"
    holidays[observed_new_year[1]] = "Day after New Year's Day"
    holidays[_shift_single_holiday(date(year, 2, 6))] = "Waitangi Day"
    holidays[easter - timedelta(days=2)] = "Good Friday"
    holidays[easter + timedelta(days=1)] = "Easter Monday"
    holidays[_shift_single_holiday(date(year, 4, 25))] = "ANZAC Day"
    holidays[_nth_weekday(year, 6, 0, 1)] = "King's Birthday"
    holidays[_nth_weekday(year, 10, 0, 4)] = "Labour Day"
    holidays[observed_christmas[0]] = "Christmas Day"
    holidays[observed_christmas[1]] = "Boxing Day"
    matariki = MATARIKI_DATES.get(year)
    if matariki is not None:
        holidays[matariki] = "Matariki"
    return holidays
def nz_public_holiday_name(day: date) -> str | None:
    """Name of the NZ public holiday observed on *day*, or None."""
    holidays = nz_public_holidays_for_year(day.year)
    return holidays.get(day)
def _normalize_template_text(value: object, fallback: str) -> str:
if isinstance(value, str):
trimmed = value.strip()
if trimmed:
return trimmed
return fallback
def _normalize_subject_body_template(data: object, fallback: dict[str, str]) -> dict[str, str]:
    """Coerce arbitrary stored data into a {subject, body} template,
    falling back field-by-field to *fallback* for missing/blank values."""
    source = data if isinstance(data, dict) else {}
    subject = _normalize_template_text(source.get("subject"), fallback["subject"])
    body = _normalize_template_text(source.get("body"), fallback["body"])
    return {"subject": subject, "body": body}
async def get_notification_template_snapshot(db: AsyncSession) -> NotificationTemplateSnapshot:
    """Load the effective notification templates.

    Admin-edited templates from the settings sections are merged over the
    code defaults; any missing or blank field falls back to the default.
    """
    automatic_section = await get_section(db, AUTOMATIC_NOTIFICATION_SECTION_KEY)
    # Start from a deep copy so the module-level defaults are never mutated.
    automatic_templates = deepcopy(DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES)
    automatic_source = automatic_section.get("templates") if isinstance(automatic_section, dict) else {}
    if isinstance(automatic_source, dict):
        for template_key, fallback in DEFAULT_AUTOMATIC_NOTIFICATION_TEMPLATES.items():
            automatic_templates[template_key] = _normalize_subject_body_template(
                automatic_source.get(template_key),
                fallback,
            )
    public_holiday_section = await get_section(db, PUBLIC_HOLIDAY_NOTIFICATION_SECTION_KEY)
    invoice_reminder_section = await get_section(db, INVOICE_REMINDER_NOTIFICATION_SECTION_KEY)
    return NotificationTemplateSnapshot(
        automatic_messages=automatic_templates,
        public_holidays=_normalize_subject_body_template(
            public_holiday_section,
            DEFAULT_PUBLIC_HOLIDAY_NOTIFICATION_TEMPLATE,
        ),
        invoice_reminders=_normalize_subject_body_template(
            invoice_reminder_section,
            DEFAULT_INVOICE_REMINDER_NOTIFICATION_TEMPLATE,
        ),
    )
def _render_template_text(template: str, context: dict[str, object]) -> str:
    """Substitute {{ token }} placeholders from *context* (missing tokens
    become empty strings) and strip surrounding whitespace."""
    rendered = TEMPLATE_TOKEN_PATTERN.sub(
        lambda match: str(context.get(match.group(1), "")),
        template,
    )
    return rendered.strip()
def _render_subject_body_template(template: dict[str, str], context: dict[str, object]) -> tuple[str, str]:
    """Render a {subject, body} template, returning (subject, body)."""
    subject = _render_template_text(template["subject"], context)
    body = _render_template_text(template["body"], context)
    return subject, body
async def get_notification_settings_snapshot(db: AsyncSession) -> NotificationSettingsSnapshot:
    """Read the notification-related flags from SiteSettings.

    Returns the dataclass defaults when no settings row exists yet.
    """
    row = await get_settings(db)
    if row is None:
        return NotificationSettingsSnapshot()
    return NotificationSettingsSnapshot(
        automatic_member_notifications_enabled=row.automatic_member_notifications_enabled,
        nz_public_holiday_notifications_enabled=row.nz_public_holiday_notifications_enabled,
        invoice_reminder_notifications_enabled=row.invoice_reminder_notifications_enabled,
        invoice_day_of_week=row.invoice_day_of_week,
        admin_notifications_cleared_before=row.admin_notifications_cleared_before,
    )
async def update_notification_settings_snapshot(
    db: AsyncSession,
    *,
    automatic_member_notifications_enabled: bool | None = None,
    nz_public_holiday_notifications_enabled: bool | None = None,
    invoice_reminder_notifications_enabled: bool | None = None,
    invoice_day_of_week: int | None = None,
    admin_notifications_cleared_before: datetime | None = None,
) -> NotificationSettingsSnapshot:
    """Partially update notification settings; None arguments are left unchanged.

    Creates the SiteSettings row if it does not exist yet, then returns a
    fresh snapshot of the resulting state.

    Raises:
        ValueError: if ``invoice_day_of_week`` is outside 0..6.

    NOTE(review): admin_notifications_cleared_before can only be set, never
    reset to None through this function — confirm that is intended.
    """
    row = await get_settings(db)
    if row is None:
        row = SiteSettings(site_name="")
        db.add(row)
        await db.flush()
    if automatic_member_notifications_enabled is not None:
        row.automatic_member_notifications_enabled = automatic_member_notifications_enabled
    if nz_public_holiday_notifications_enabled is not None:
        row.nz_public_holiday_notifications_enabled = nz_public_holiday_notifications_enabled
    if invoice_reminder_notifications_enabled is not None:
        row.invoice_reminder_notifications_enabled = invoice_reminder_notifications_enabled
    if invoice_day_of_week is not None:
        if invoice_day_of_week < 0 or invoice_day_of_week > 6:
            raise ValueError("invoice_day_of_week must be between 0 and 6")
        row.invoice_day_of_week = invoice_day_of_week
    if admin_notifications_cleared_before is not None:
        row.admin_notifications_cleared_before = admin_notifications_cleared_before
    await db.flush()
    await db.refresh(row)
    return await get_notification_settings_snapshot(db)
async def create_member_message(
    db: AsyncSession,
    *,
    member: Member,
    subject: str,
    body: str,
    sent_by: str = DEFAULT_SENT_BY,
    automatic: bool = False,
    dispatch_key: str | None = None,
    notification_type: str | None = None,
    respect_preferences: bool = True,
) -> AdminMessage | None:
    """Create an AdminMessage for *member*, subject to several gates.

    Gates, in order: messages feature flag, the member's own notification
    preference (unless respect_preferences=False), the site-wide automatic
    toggle (only for automatic=True), and dispatch_key de-duplication.
    Returns the created message, or None when any gate suppressed it.
    The message and any dispatch record are flushed but not committed —
    they join the caller's transaction.
    """
    feature_settings = await get_feature_settings_snapshot(db)
    if not feature_settings.messages_enabled:
        return None
    if respect_preferences and not member.notifications_enabled:
        return None
    if automatic:
        settings = await get_notification_settings_snapshot(db)
        if not settings.automatic_member_notifications_enabled:
            return None
    if dispatch_key is not None:
        # De-duplicate: send at most once per (member, dispatch_key).
        existing = await db.execute(
            select(MemberNotificationDispatch).where(
                MemberNotificationDispatch.member_id == member.id,
                MemberNotificationDispatch.dispatch_key == dispatch_key,
            )
        )
        if existing.scalars().first() is not None:
            return None
        # Record the dispatch alongside the message so a retry won't resend.
        db.add(
            MemberNotificationDispatch(
                member_id=member.id,
                notification_type=notification_type or "notification",
                dispatch_key=dispatch_key,
                metadata_json={"automatic": automatic},
            )
        )
    message = AdminMessage(
        member_id=member.id,
        subject=subject,
        body=body,
        sent_by=sent_by,
    )
    db.add(message)
    await db.flush()
    return message
async def send_account_activated_notification(
    db: AsyncSession,
    member: Member,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send the welcome message after a member's account is activated.

    The dispatch key embeds the activation timestamp, so repeat calls for
    the same activation are deduplicated by create_member_message.
    """
    snapshot = templates or await get_notification_template_snapshot(db)
    activated_marker = member.activated_at.isoformat() if member.activated_at else "pending"
    subject, body = _render_subject_body_template(
        snapshot.automatic_messages["member_activated"],
        {
            "member_first_name": member.first_name,
            "member_last_name": member.last_name,
        },
    )
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"member_activated:{member.id}:{activated_marker}",
        notification_type="member_activated",
    )
async def send_booking_status_notification(
    db: AsyncSession,
    member: Member,
    booking: Booking,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Notify *member* that their booking was confirmed or cancelled.

    Any other booking status is ignored and None is returned.
    """
    status = booking.status
    if status != "confirmed" and status != "cancelled":
        return None
    snapshot = templates or await get_notification_template_snapshot(db)
    date_label = (
        _format_nz_date(booking.requested_date, include_year=True)
        if booking.requested_date
        else "the requested date"
    )
    template_key = "booking_confirmed" if status == "confirmed" else "booking_cancelled"
    subject, body = _render_subject_body_template(
        snapshot.automatic_messages[template_key],
        {
            "member_first_name": member.first_name,
            "member_last_name": member.last_name,
            "service_label": _service_label(booking.service_type),
            "requested_date_label": date_label,
            "booking_status": status,
        },
    )
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"booking_status:{booking.id}:{status}",
        notification_type="booking_status",
    )
async def send_booking_rescheduled_notification(
    db: AsyncSession,
    member: Member,
    booking: Booking,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Tell *member* their booking moved to a new date.

    Deduplicated per booking + requested date via the dispatch key.
    """
    snapshot = templates or await get_notification_template_snapshot(db)
    if booking.requested_date:
        date_label = _format_nz_date(booking.requested_date, include_year=True)
        date_marker = booking.requested_date.isoformat()
    else:
        date_label = "a new date"
        date_marker = "none"
    subject, body = _render_subject_body_template(
        snapshot.automatic_messages["booking_rescheduled"],
        {
            "member_first_name": member.first_name,
            "member_last_name": member.last_name,
            "service_label": _service_label(booking.service_type),
            "requested_date_label": date_label,
        },
    )
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"booking_rescheduled:{booking.id}:{date_marker}",
        notification_type="booking_rescheduled",
    )
async def send_walk_completed_notification(
    db: AsyncSession,
    member: Member,
    walk: Walk,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send the post-walk summary for a completed walk (deduped per walk id)."""
    snapshot = templates or await get_notification_template_snapshot(db)
    notes_sentence = f" Notes from the team: {walk.notes}" if walk.notes else ""
    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "service_label": _service_label(walk.service_type),
        "walked_on_label": _format_nz_date(walk.walked_at),
        "walk_notes": walk.notes or "",
        "walk_notes_sentence": notes_sentence,
    }
    subject, body = _render_subject_body_template(
        snapshot.automatic_messages["walk_completed"], context
    )
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"walk_completed:{walk.id}",
        notification_type="walk_completed",
    )
async def send_public_holiday_notification(
    db: AsyncSession,
    member: Member,
    holiday_date: date,
    holiday_name: str,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send the NZ public-holiday notice to *member* (deduped per date)."""
    snapshot = templates or await get_notification_template_snapshot(db)
    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "holiday_name": holiday_name,
        "holiday_date_label": _format_nz_date(holiday_date, include_year=True),
    }
    subject, body = _render_subject_body_template(snapshot.public_holidays, context)
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"public_holiday:{holiday_date.isoformat()}",
        notification_type="public_holiday",
    )
async def send_invoice_day_notification(
    db: AsyncSession,
    member: Member,
    invoice_date: date,
    weekday_label: str,
    *,
    templates: NotificationTemplateSnapshot | None = None,
) -> AdminMessage | None:
    """Send the weekly invoice reminder to *member* (deduped per date)."""
    snapshot = templates or await get_notification_template_snapshot(db)
    context = {
        "member_first_name": member.first_name,
        "member_last_name": member.last_name,
        "weekday_label": weekday_label,
        "invoice_date_label": _format_nz_date(invoice_date),
    }
    subject, body = _render_subject_body_template(snapshot.invoice_reminders, context)
    return await create_member_message(
        db,
        member=member,
        subject=subject,
        body=body,
        automatic=True,
        dispatch_key=f"invoice_reminder:{invoice_date.isoformat()}",
        notification_type="invoice_reminder",
    )
async def run_automatic_notifications(
    db: AsyncSession,
    *,
    now: datetime | None = None,
) -> NotificationRunSummary:
    """Run one pass of the automatic member notifications.

    Evaluates "today" in NZ local time (from *now*, defaulting to the current
    UTC instant) and, for every active member with notifications enabled,
    sends any due public-holiday and invoice-day messages. Per-member
    dispatch keys make the senders idempotent, so running this repeatedly
    on the same day is safe.

    Args:
        db: Active async session; rows are added/flushed but not committed.
        now: Optional reference instant, primarily for testing.

    Returns:
        NotificationRunSummary with counts of messages actually created.
    """
    settings = await get_notification_settings_snapshot(db)
    summary = NotificationRunSummary(
        automatic_member_notifications_enabled=settings.automatic_member_notifications_enabled,
    )
    # Master switch: skip all work when automatic notifications are off.
    if not settings.automatic_member_notifications_enabled:
        return summary
    templates = await get_notification_template_snapshot(db)
    local_now = (now or datetime.now(timezone.utc)).astimezone(NZ_TZ)
    local_date = local_now.date()
    # Only active members who opted in are ever considered.
    result = await db.execute(
        select(Member).where(
            Member.is_active == True,  # noqa: E712
            Member.member_status == "active",
            Member.notifications_enabled == True,  # noqa: E712
        )
    )
    members = result.scalars().all()
    if settings.nz_public_holiday_notifications_enabled:
        holiday_name = nz_public_holiday_name(local_date)
        if holiday_name:
            for member in members:
                created = await send_public_holiday_notification(
                    db,
                    member,
                    local_date,
                    holiday_name,
                    templates=templates,
                )
                # None means suppressed (toggle, preference, or dedup hit).
                if created is not None:
                    summary.public_holiday_messages_sent += 1
    if settings.invoice_reminder_notifications_enabled and local_date.weekday() == settings.invoice_day_of_week:
        weekday_label = _weekday_label(settings.invoice_day_of_week)
        for member in members:
            created = await send_invoice_day_notification(
                db,
                member,
                local_date,
                weekday_label,
                templates=templates,
            )
            if created is not None:
                summary.invoice_reminders_sent += 1
    return summary
async def notification_automation_loop() -> None:
    """Background task: periodically run the automatic notification pass.

    Each iteration opens a fresh session, runs one pass, and commits.
    CancelledError is re-raised immediately (before the sleep) so task
    shutdown is prompt; any other failure is logged and the loop continues
    after the normal interval.
    """
    while True:
        try:
            async with AsyncSessionLocal() as session:
                await run_automatic_notifications(session)
                await session.commit()
        except asyncio.CancelledError:
            raise
        except Exception:
            # Keep the loop alive across transient failures (e.g. DB outage).
            logger.exception("Automatic member notification loop failed.")
        await asyncio.sleep(AUTOMATION_INTERVAL_SECONDS)
+73
View File
@@ -0,0 +1,73 @@
"""
Service layer for Page CRUD operations.
All DB queries are async; HTML body is sanitized on write.
"""
import nh3
from typing import Optional
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.page import Page
from app.schemas.page import PageCreate, PageUpdate
def _sanitize_body(body: str) -> str:
    """Return *body* with unsafe HTML removed via nh3's default allow-list."""
    cleaned = nh3.clean(body)
    return cleaned
async def get_published_pages(db: AsyncSession) -> list[Page]:
    """Return every published page, newest first."""
    stmt = (
        select(Page)
        .where(Page.published == True)  # noqa: E712
        .order_by(Page.created_at.desc())
    )
    rows = await db.execute(stmt)
    return list(rows.scalars().all())
async def get_page_by_slug(db: AsyncSession, slug: str, published_only: bool = True) -> Optional[Page]:
    """Look up a page by slug; include unpublished pages when published_only=False."""
    conditions = [Page.slug == slug]
    if published_only:
        conditions.append(Page.published == True)  # noqa: E712
    rows = await db.execute(select(Page).where(*conditions))
    return rows.scalars().first()
async def create_page(db: AsyncSession, data: PageCreate) -> Page:
    """Persist a new page; the HTML body is sanitized before storage."""
    fields = {
        "title": data.title,
        "slug": data.slug,
        "body": _sanitize_body(data.body),
        "meta_title": data.meta_title,
        "meta_description": data.meta_description,
        "og_image_url": data.og_image_url,
        "published": data.published,
    }
    new_page = Page(**fields)
    db.add(new_page)
    await db.flush()
    await db.refresh(new_page)
    return new_page
async def update_page(db: AsyncSession, slug: str, data: PageUpdate) -> Optional[Page]:
    """Apply a partial update to the page at *slug*; returns None if absent."""
    page = await get_page_by_slug(db, slug, published_only=False)
    if page is None:
        return None
    changes = data.model_dump(exclude_unset=True)
    # Sanitize the body only when the caller actually supplied one.
    if changes.get("body") is not None:
        changes["body"] = _sanitize_body(changes["body"])
    for name, value in changes.items():
        setattr(page, name, value)
    await db.flush()
    await db.refresh(page)
    return page
async def delete_page(db: AsyncSession, slug: str) -> bool:
    """Delete the page at *slug*; returns True when a row was removed."""
    target = await get_page_by_slug(db, slug, published_only=False)
    if target is None:
        return False
    await db.delete(target)
    await db.flush()
    return True
+99
View File
@@ -0,0 +1,99 @@
"""
Service layer for BlogPost CRUD operations.
"""
import math
import nh3
from typing import Optional
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.post import BlogPost
from app.schemas.post import PostCreate, PostUpdate, PaginatedPostsResponse, PostResponse
def _sanitize_body(body: str) -> str:
    """Sanitize untrusted HTML with nh3 (default allow-list) before storage."""
    return nh3.clean(body)
async def get_published_posts(
    db: AsyncSession, page: int = 1, per_page: int = 10
) -> PaginatedPostsResponse:
    """Return one page of published blog posts, newest first.

    Args:
        db: Active async session.
        page: 1-based page number. Values below 1 are clamped to 1 so the
            query never runs with a negative OFFSET (an error on PostgreSQL).
        per_page: Page size; values below 0 are clamped to 0.

    Returns:
        PaginatedPostsResponse with the items plus total/page metadata.
    """
    # Clamp inputs so arbitrary query parameters cannot produce an invalid
    # OFFSET/LIMIT at the database layer.
    page = max(page, 1)
    per_page = max(per_page, 0)
    offset = (page - 1) * per_page
    count_result = await db.execute(
        select(func.count()).select_from(BlogPost).where(BlogPost.published == True)  # noqa: E712
    )
    total = count_result.scalar_one()
    result = await db.execute(
        select(BlogPost)
        .where(BlogPost.published == True)  # noqa: E712
        .order_by(BlogPost.created_at.desc())
        .offset(offset)
        .limit(per_page)
    )
    items = list(result.scalars().all())
    total_pages = math.ceil(total / per_page) if per_page > 0 else 0
    return PaginatedPostsResponse(
        items=[PostResponse.model_validate(p) for p in items],
        total=total,
        page=page,
        per_page=per_page,
        total_pages=total_pages,
    )
async def get_post_by_slug(
    db: AsyncSession, slug: str, published_only: bool = True
) -> Optional[BlogPost]:
    """Look up a blog post by slug; include drafts when published_only=False."""
    conditions = [BlogPost.slug == slug]
    if published_only:
        conditions.append(BlogPost.published == True)  # noqa: E712
    rows = await db.execute(select(BlogPost).where(*conditions))
    return rows.scalars().first()
async def create_post(db: AsyncSession, data: PostCreate) -> BlogPost:
    """Persist a new blog post; the HTML body is sanitized before storage."""
    fields = {
        "title": data.title,
        "slug": data.slug,
        "excerpt": data.excerpt,
        "body": _sanitize_body(data.body),
        "author": data.author,
        "featured_image_url": data.featured_image_url,
        "tags": data.tags,
        "published": data.published,
    }
    new_post = BlogPost(**fields)
    db.add(new_post)
    await db.flush()
    await db.refresh(new_post)
    return new_post
async def update_post(db: AsyncSession, slug: str, data: PostUpdate) -> Optional[BlogPost]:
    """Apply a partial update to the post at *slug*; returns None if absent."""
    post = await get_post_by_slug(db, slug, published_only=False)
    if post is None:
        return None
    changes = data.model_dump(exclude_unset=True)
    # Sanitize the body only when a non-None body was supplied.
    if changes.get("body") is not None:
        changes["body"] = _sanitize_body(changes["body"])
    for name, value in changes.items():
        setattr(post, name, value)
    await db.flush()
    await db.refresh(post)
    return post
async def delete_post(db: AsyncSession, slug: str) -> bool:
    """Delete the post at *slug*; returns True when a row was removed."""
    target = await get_post_by_slug(db, slug, published_only=False)
    if target is None:
        return False
    await db.delete(target)
    await db.flush()
    return True
+80
View File
@@ -0,0 +1,80 @@
from copy import deepcopy
SERVICE_PRICING_DEFAULTS = {
"pack_walk": {
"label": "Pack Walk",
"amount": 58.0,
"unit": "per walk",
},
"1_1_walk": {
"label": "1-1 Walk",
"amount": 45.0,
"unit": "per walk",
},
"puppy_visit": {
"label": "Puppy Visit",
"amount": 39.0,
"unit": "per visit",
},
}
def default_service_pricing() -> dict[str, dict[str, float | str]]:
return deepcopy(SERVICE_PRICING_DEFAULTS)
def normalize_service_pricing(data: object | None) -> dict[str, dict[str, float | str]]:
normalized = default_service_pricing()
source = data if isinstance(data, dict) else {}
for service_type, defaults in normalized.items():
candidate = source.get(service_type) if isinstance(source, dict) else None
if not isinstance(candidate, dict):
continue
amount = candidate.get("amount")
try:
parsed_amount = round(float(amount), 2)
except (TypeError, ValueError):
parsed_amount = defaults["amount"]
if parsed_amount < 0:
parsed_amount = defaults["amount"]
unit = candidate.get("unit")
label = candidate.get("label")
normalized[service_type] = {
"label": label.strip() if isinstance(label, str) and label.strip() else defaults["label"],
"amount": parsed_amount,
"unit": unit.strip() if isinstance(unit, str) and unit.strip() else defaults["unit"],
}
return normalized
def normalize_service_pricing_overrides(data: object | None) -> dict[str, float]:
if not isinstance(data, dict):
return {}
normalized: dict[str, float] = {}
for service_type in SERVICE_PRICING_DEFAULTS:
if service_type not in data:
continue
value = data.get(service_type)
if value in (None, ""):
continue
try:
parsed = round(float(value), 2)
except (TypeError, ValueError):
continue
if parsed < 0:
continue
normalized[service_type] = parsed
return normalized
+26
View File
@@ -0,0 +1,26 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.models.section import ContentSection
async def get_section(db: AsyncSession, key: str) -> dict | None:
    """Fetch the JSON payload for content section *key*, or None when absent."""
    lookup = await db.execute(select(ContentSection).where(ContentSection.key == key))
    section = lookup.scalar_one_or_none()
    if section is None:
        return None
    return section.data
async def upsert_section(db: AsyncSession, key: str, data: dict) -> ContentSection:
    """Create or update the content section *key* with *data*.

    Flushes unconditionally before returning so that a newly created row has
    its database-generated fields populated and the update path surfaces
    constraint errors here rather than at commit time.
    """
    result = await db.execute(select(ContentSection).where(ContentSection.key == key))
    row = result.scalar_one_or_none()
    if row is not None:
        row.data = data
    else:
        row = ContentSection(key=key, data=data)
        db.add(row)
    await db.flush()
    return row
async def list_sections(db: AsyncSession) -> list[dict]:
    """Return every section's key and last-update timestamp, ordered by key."""
    result = await db.execute(select(ContentSection).order_by(ContentSection.key))
    return [
        {"key": section.key, "updated_at": section.updated_at.isoformat()}
        for section in result.scalars()
    ]
+173
View File
@@ -0,0 +1,173 @@
"""
Service layer for SiteSettings singleton.
Uses get-or-create pattern; only one row should ever exist.
"""
from dataclasses import dataclass
from typing import Optional
from sqlalchemy import inspect as sa_inspect, select
from sqlalchemy.orm import load_only
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.settings import SiteSettings
from app.schemas.settings import FeatureSettingsUpdate, SiteSettingsUpdate
from app.services.pricing import default_service_pricing, normalize_service_pricing
@dataclass(frozen=True)
class FeatureSettingsSnapshot:
    """Immutable view of the site-wide feature toggles.

    Every flag defaults to True so that a missing settings row — or an older
    schema without the toggle columns — behaves as "feature enabled".
    """
    bookings_enabled: bool = True
    walks_enabled: bool = True
    messages_enabled: bool = True
    two_factor_enabled: bool = True
    audit_history_enabled: bool = True
    experiments_enabled: bool = True
class FeatureSettingsSchemaOutdatedError(RuntimeError):
    """Raised when feature settings are requested against an older schema.

    Signals that the feature-toggle columns have not been migrated yet
    (i.e. `alembic upgrade head` needs to be run).
    """


class ServicePricingSchemaOutdatedError(RuntimeError):
    """Raised when service pricing is requested against an older schema.

    Signals that the service_pricing column has not been migrated yet.
    """
async def _get_site_settings_column_names(db: AsyncSession) -> set[str]:
    """Introspect the live database for the site_settings column names."""
    connection = await db.connection()

    def _collect(sync_connection):
        # SQLAlchemy inspection needs a sync connection; run_sync bridges it.
        inspector = sa_inspect(sync_connection)
        return {col["name"] for col in inspector.get_columns("site_settings")}

    return await connection.run_sync(_collect)
async def get_settings(db: AsyncSession, *, existing_columns: set[str] | None = None) -> Optional[SiteSettings]:
    """Return the singleton SiteSettings row, or None when no row exists.

    Loads only columns that actually exist in the database so queries keep
    working against an older (pre-migration) schema.
    """
    column_names = existing_columns or await _get_site_settings_column_names(db)
    mapped_attrs = [
        getattr(SiteSettings, name)
        for name in column_names
        if hasattr(SiteSettings, name)
    ]
    statement = select(SiteSettings).limit(1)
    if mapped_attrs:
        statement = statement.options(load_only(*mapped_attrs))
    rows = await db.execute(statement)
    return rows.scalars().first()
async def upsert_settings(db: AsyncSession, data: SiteSettingsUpdate) -> SiteSettings:
    """Create or update the singleton SiteSettings row from a partial update.

    On first write the row is created with full defaults overlaid by the
    explicitly-set fields of *data*; on later writes only the explicitly-set
    fields are applied. Flushes and refreshes, but does not commit.
    """
    settings_row = await get_settings(db)
    if settings_row is None:
        # Create with defaults + provided values
        init_data = {
            "site_name": "",
            "tagline": None,
            "logo_url": None,
            "footer_text": None,
            "social_links": {},
            "automatic_member_notifications_enabled": True,
            "nz_public_holiday_notifications_enabled": True,
            "invoice_reminder_notifications_enabled": True,
            "invoice_day_of_week": 1,
            "admin_notifications_cleared_before": None,
            "bookings_enabled": True,
            "walks_enabled": True,
            "messages_enabled": True,
            "two_factor_enabled": True,
            "audit_history_enabled": True,
            "experiments_enabled": True,
            "service_pricing": default_service_pricing(),
        }
        update_data = data.model_dump(exclude_unset=True)
        init_data.update(update_data)
        settings_row = SiteSettings(**init_data)
        db.add(settings_row)
    else:
        # Only fields the caller explicitly set are overwritten.
        update_data = data.model_dump(exclude_unset=True)
        for field, value in update_data.items():
            setattr(settings_row, field, value)
    await db.flush()
    await db.refresh(settings_row)
    return settings_row
async def get_feature_settings_snapshot(db: AsyncSession) -> FeatureSettingsSnapshot:
    """Build a FeatureSettingsSnapshot from the database.

    Any flag whose column is missing (older schema) or whose settings row is
    absent defaults to True ("feature enabled").
    """
    existing_columns = await _get_site_settings_column_names(db)
    row = await get_settings(db, existing_columns=existing_columns)
    if row is None:
        return FeatureSettingsSnapshot()
    flag_names = (
        "bookings_enabled",
        "walks_enabled",
        "messages_enabled",
        "two_factor_enabled",
        "audit_history_enabled",
        "experiments_enabled",
    )
    flags = {
        name: getattr(row, name, True) if name in existing_columns else True
        for name in flag_names
    }
    return FeatureSettingsSnapshot(**flags)
async def update_feature_settings_snapshot(
    db: AsyncSession,
    data: FeatureSettingsUpdate,
) -> FeatureSettingsSnapshot:
    """Persist feature-toggle changes and return the resulting snapshot.

    Creates the singleton settings row if it does not exist yet. Only fields
    explicitly set on *data* are written.

    Raises:
        FeatureSettingsSchemaOutdatedError: when any requested toggle column
            does not exist in site_settings (migrations not applied).
    """
    existing_columns = await _get_site_settings_column_names(db)
    # Dump once and reuse for both the schema check and the attribute writes
    # (the original computed model_dump(exclude_unset=True) twice).
    update_data = data.model_dump(exclude_unset=True)
    missing_columns = sorted(field for field in update_data if field not in existing_columns)
    if missing_columns:
        # Name the offending columns so the operator knows what to migrate.
        raise FeatureSettingsSchemaOutdatedError(
            "Feature toggle columns are missing from site_settings "
            f"({', '.join(missing_columns)}). Run alembic upgrade head."
        )
    row = await get_settings(db, existing_columns=existing_columns)
    if row is None:
        row = SiteSettings(site_name="")
        db.add(row)
        await db.flush()
    for field, value in update_data.items():
        setattr(row, field, value)
    await db.flush()
    await db.refresh(row)
    return await get_feature_settings_snapshot(db)
async def get_service_pricing_snapshot(db: AsyncSession) -> dict[str, dict[str, float | str]]:
    """Return the effective service price book.

    Falls back to the defaults when there is no settings row or the schema
    predates the service_pricing column.
    """
    existing_columns = await _get_site_settings_column_names(db)
    row = await get_settings(db, existing_columns=existing_columns)
    if row is None or "service_pricing" not in existing_columns:
        return default_service_pricing()
    return normalize_service_pricing(getattr(row, "service_pricing", None))
async def update_service_pricing_snapshot(
    db: AsyncSession,
    *,
    service_pricing: dict,
) -> dict[str, dict[str, float | str]]:
    """Normalize and persist *service_pricing*, returning the stored snapshot.

    Raises:
        ServicePricingSchemaOutdatedError: when the service_pricing column
            is absent from site_settings.
    """
    columns = await _get_site_settings_column_names(db)
    if "service_pricing" not in columns:
        raise ServicePricingSchemaOutdatedError(
            "Service pricing columns are missing from site_settings. Run alembic upgrade head."
        )
    settings_row = await get_settings(db, existing_columns=columns)
    if settings_row is None:
        settings_row = SiteSettings(site_name="", service_pricing=default_service_pricing())
        db.add(settings_row)
        await db.flush()
    settings_row.service_pricing = normalize_service_pricing(service_pricing)
    await db.flush()
    await db.refresh(settings_row)
    return await get_service_pricing_snapshot(db)
+774
View File
@@ -0,0 +1,774 @@
"""
Goodwalk Backend CLI
--------------------
A rich management interface for the Goodwalk CMS API.
Usage (from backend/ directory):
python cli.py — show help
python cli.py dev — start dev server
python cli.py migrate — run Alembic migrations
python cli.py seed — seed admin user + sample CMS content
python cli.py seed-content — seed site content from data/content.json
python cli.py status — show DB connection + table row counts
python cli.py create-admin — create a new admin user interactively
python cli.py routes — list all registered API routes
"""
import asyncio
import os
import sys
from pathlib import Path
import typer
from rich import box
from rich.console import Console
from rich.panel import Panel
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskProgressColumn
from rich.prompt import Prompt, Confirm
from rich.table import Table
from rich.text import Text
from rich import print as rprint
# ── Setup ─────────────────────────────────────────────────────────────────────
# Typer application: every @app.command() below registers a subcommand.
app = typer.Typer(
    name="goodwalk",
    help="[bold green]Goodwalk CMS API[/bold green] — management CLI",
    add_completion=False,
    rich_markup_mode="rich",
)
# Shared Rich console used by all commands for styled terminal output.
console = Console()
BANNER = """
[bold green]
██████╗ ██████╗ ██████╗ ██████╗ ██╗ ██╗ █████╗ ██╗ ██╗ ██╗
██╔════╝ ██╔═══██╗██╔═══██╗██╔══██╗██║ ██║██╔══██╗██║ ██║ ██╔╝
██║ ███╗██║ ██║██║ ██║██║ ██║██║ █╗ ██║███████║██║ █████╔╝
██║ ██║██║ ██║██║ ██║██║ ██║██║███╗██║██╔══██║██║ ██╔═██╗
╚██████╔╝╚██████╔╝╚██████╔╝██████╔╝╚███╔███╔╝██║ ██║███████╗██║ ██╗
╚═════╝ ╚═════╝ ╚═════╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝
[/bold green][dim] CMS API · FastAPI · PostgreSQL · JWT Auth[/dim]
"""
def _print_banner():
    # Render the ASCII-art banner shown at the top of every CLI command.
    console.print(BANNER)
def _load_settings():
    """Import and return the app settings, exiting with code 1 on failure.

    Importing app.config triggers the .env load as a side effect.
    """
    try:
        from app.config import settings

        return settings
    except Exception as e:
        console.print(f"[bold red]✗ Failed to load settings:[/bold red] {e}")
        raise typer.Exit(1)
def _make_engine(settings):
    """Build a non-echoing async SQLAlchemy engine from the given settings."""
    from sqlalchemy.ext.asyncio import create_async_engine

    return create_async_engine(settings.DATABASE_URL, echo=False)
# ── dev ───────────────────────────────────────────────────────────────────────
@app.command()
def dev(
host: str = typer.Option("127.0.0.1", "--host", "-h", help="Bind host"),
port: int = typer.Option(8000, "--port", "-p", help="Bind port"),
reload: bool = typer.Option(True, "--reload/--no-reload", help="Enable auto-reload"),
):
"""
[bold]Start the development server.[/bold]
Launches Uvicorn with hot-reload enabled. Reads configuration from [cyan].env[/cyan].
"""
_print_banner()
settings = _load_settings()
info = Table.grid(padding=(0, 2))
info.add_column(style="dim")
info.add_column(style="bold cyan")
info.add_row("API URL", f"http://{host}:{port}")
info.add_row("Docs", f"http://{host}:{port}/docs")
info.add_row("Redoc", f"http://{host}:{port}/redoc")
info.add_row("Health", f"http://{host}:{port}/health")
info.add_row("Database", settings.DATABASE_URL.split("@")[-1] if "@" in settings.DATABASE_URL else settings.DATABASE_URL)
info.add_row("CORS Origins", settings.ALLOWED_ORIGINS)
info.add_row("Auto-reload", "[green]on[/green]" if reload else "[yellow]off[/yellow]")
console.print(Panel(info, title="[bold green]Starting Goodwalk CMS API[/bold green]", border_style="green"))
console.print()
import uvicorn
uvicorn.run(
"app.main:app",
host=host,
port=port,
reload=reload,
log_level="warning", # suppress uvicorn's own access log — we use RequestLogMiddleware
access_log=False,
)
# ── migrate ───────────────────────────────────────────────────────────────────
@app.command()
def migrate(
revision: str = typer.Argument("head", help="Alembic revision target (default: head)"),
autogenerate: bool = typer.Option(False, "--autogenerate", "-a", help="Generate a new migration"),
message: str = typer.Option("auto", "--message", "-m", help="Migration message (used with --autogenerate)"),
):
"""
[bold]Run database migrations via Alembic.[/bold]
By default runs [cyan]alembic upgrade head[/cyan].
Pass [cyan]--autogenerate[/cyan] to generate a new migration from model changes.
"""
_print_banner()
console.print(Panel("[bold]Running Alembic migrations[/bold]", border_style="cyan"))
import subprocess
if autogenerate:
cmd = [sys.executable, "-m", "alembic", "revision", "--autogenerate", "-m", message]
label = f"Generating migration: [cyan]{message}[/cyan]"
else:
cmd = [sys.executable, "-m", "alembic", "upgrade", revision]
label = f"Upgrading to [cyan]{revision}[/cyan]"
with Progress(
SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
console=console,
) as progress:
task = progress.add_task(label, total=None)
result = subprocess.run(cmd, capture_output=True, text=True)
progress.update(task, completed=True)
if result.returncode == 0:
console.print(f"\n[bold green]✓ Migration complete[/bold green]")
if result.stdout.strip():
console.print(f"[dim]{result.stdout.strip()}[/dim]")
else:
console.print(f"\n[bold red]✗ Migration failed[/bold red]")
console.print(f"[red]{result.stderr.strip()}[/red]")
raise typer.Exit(1)
# ── seed ──────────────────────────────────────────────────────────────────────
@app.command()
def seed():
"""
[bold]Seed the database with a default admin user and sample content.[/bold]
Creates:
• Admin user [cyan]admin@example.com[/cyan] / [cyan]changeme123[/cyan]
• Sample Page, BlogPost, and SiteSettings rows
• All site content sections from [cyan]data/content.json[/cyan]
"""
_print_banner()
console.print(Panel("[bold]Seeding database[/bold]", border_style="cyan"))
asyncio.run(_seed_async())
async def _seed_async():
    """Idempotently seed admin user, sample CMS rows, and content sections.

    Existing rows are left alone (or, for content sections, updated in
    place), so the command is safe to run repeatedly. Prints a summary
    table of what was created/updated/skipped.
    """
    settings = _load_settings()
    from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
    from sqlalchemy import select, text
    from app.models import Base, User, Page, BlogPost, SiteSettings, ContentSection
    from app.auth.password import hash_password
    import json
    engine = _make_engine(settings)
    # Ensure all tables exist before inserting anything.
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    Session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
    results = []
    async with Session() as session:
        # ── Admin user ──────────────────────────────────────────────────────
        existing = await session.execute(select(User).where(User.email == "admin@example.com"))
        if existing.scalar_one_or_none() is None:
            session.add(User(
                email="admin@example.com",
                hashed_password=hash_password("changeme123"),
                is_active=True,
            ))
            results.append(("[green]created[/green]", "User", "admin@example.com"))
        else:
            results.append(("[yellow]exists[/yellow] ", "User", "admin@example.com"))
        # ── Sample Page ─────────────────────────────────────────────────────
        existing = await session.execute(select(Page).where(Page.slug == "home"))
        if existing.scalar_one_or_none() is None:
            session.add(Page(
                title="Home",
                slug="home",
                body="<h1>Welcome to Goodwalk</h1><p>Professional dog walking services across Auckland Central.</p>",
                meta_title="Goodwalk Auckland Dog Walking Service",
                meta_description="Trusted, professional dog walking services across Auckland Central.",
                published=True,
            ))
            results.append(("[green]created[/green]", "Page", "home"))
        else:
            results.append(("[yellow]exists[/yellow] ", "Page", "home"))
        # ── Sample BlogPost ─────────────────────────────────────────────────
        existing = await session.execute(select(BlogPost).where(BlogPost.slug == "hello-world"))
        if existing.scalar_one_or_none() is None:
            session.add(BlogPost(
                title="Welcome to the Goodwalk Blog",
                slug="hello-world",
                excerpt="Our first blog post — introducing Goodwalk and the Tiny Gang.",
                body="<p>Welcome to the Goodwalk blog! We'll be sharing updates, tips, and stories from the Tiny Gang.</p>",
                author="Alessandra",
                tags=["news", "welcome"],
                published=True,
            ))
            results.append(("[green]created[/green]", "BlogPost", "hello-world"))
        else:
            results.append(("[yellow]exists[/yellow] ", "BlogPost", "hello-world"))
        # ── SiteSettings ────────────────────────────────────────────────────
        existing = await session.execute(select(SiteSettings).limit(1))
        if existing.scalar_one_or_none() is None:
            session.add(SiteSettings(
                site_name="Goodwalk",
                tagline="Unleashing Fun in Your Dog's Day!",
                logo_url="/images/logo-v6.png",
                footer_text="© 2026 Goodwalk. All rights reserved.",
                social_links={
                    "instagram": "https://www.instagram.com/goodwalk.nz",
                    "facebook": "https://www.facebook.com/goodwalk.nz",
                    "google": "https://g.page/goodwalk",
                },
            ))
            results.append(("[green]created[/green]", "SiteSettings", "singleton"))
        else:
            results.append(("[yellow]exists[/yellow] ", "SiteSettings", "singleton"))
        # ── Content sections from content.json ──────────────────────────────
        content_file = Path(__file__).parent / "data" / "content.json"
        if content_file.exists():
            with open(content_file, encoding="utf-8") as f:
                content = json.load(f)
            pages_data = content.get("pages", {})
            # One ContentSection row per top-level key / page sub-key.
            sections = {
                "siteSettings": content.get("siteSettings", {}),
                "navigation": content.get("navigation", {}),
                "footer": content.get("footer", {}),
                "testimonials": content.get("testimonials", []),
                "pages.home": pages_data.get("home", {}),
                "pages.packWalks": pages_data.get("packWalks", {}),
                "pages.oneOnOneWalks": pages_data.get("oneOnOneWalks", {}),
                "pages.puppyVisits": pages_data.get("puppyVisits", {}),
                "pages.pricing": pages_data.get("pricing", {}),
                "pages.about": pages_data.get("about", {}),
                "pages.contact": pages_data.get("contact", {}),
            }
            for key, data in sections.items():
                existing = await session.execute(
                    select(ContentSection).where(ContentSection.key == key)
                )
                row = existing.scalar_one_or_none()
                if row is None:
                    session.add(ContentSection(key=key, data=data))
                    results.append(("[green]created[/green]", "Section", key))
                else:
                    # Sections are refreshed from the file on every run.
                    row.data = data
                    results.append(("[cyan]updated[/cyan] ", "Section", key))
        else:
            console.print(f"  [yellow]⚠ content.json not found at {content_file} — skipping sections[/yellow]")
        await session.commit()
    await engine.dispose()
    # ── Results table ────────────────────────────────────────────────────────
    table = Table(box=box.ROUNDED, show_header=True, border_style="green")
    table.add_column("Status", style="bold", width=12)
    table.add_column("Type", style="cyan", width=16)
    table.add_column("Key / Identifier")
    for status, type_, key in results:
        table.add_row(status, type_, key)
    console.print()
    console.print(table)
    console.print(f"\n[bold green]✓ Seed complete[/bold green] — {len(results)} records processed\n")
# ── seed-content ──────────────────────────────────────────────────────────────
@app.command(name="seed-content")
def seed_content():
"""
[bold]Re-seed only the content sections from data/content.json.[/bold]
Upserts all page sections, navigation, footer, testimonials, and settings.
Safe to run multiple times — existing rows are updated, not duplicated.
"""
_print_banner()
console.print(Panel("[bold]Seeding content sections from data/content.json[/bold]", border_style="cyan"))
asyncio.run(_seed_content_async())
async def _seed_content_async():
    """Upsert every content section from data/content.json with a progress bar.

    Exits with code 1 when the content file is missing. Existing rows are
    updated in place; missing ones are inserted.
    """
    import json
    settings = _load_settings()
    from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
    from sqlalchemy import select
    from app.models import Base, ContentSection
    content_file = Path(__file__).parent / "data" / "content.json"
    if not content_file.exists():
        console.print(f"[bold red]✗ Not found:[/bold red] {content_file}")
        raise typer.Exit(1)
    with open(content_file, encoding="utf-8") as f:
        content = json.load(f)
    pages_data = content.get("pages", {})
    # One ContentSection row per top-level key / page sub-key.
    sections = {
        "siteSettings": content.get("siteSettings", {}),
        "navigation": content.get("navigation", {}),
        "footer": content.get("footer", {}),
        "testimonials": content.get("testimonials", []),
        "pages.home": pages_data.get("home", {}),
        "pages.packWalks": pages_data.get("packWalks", {}),
        "pages.oneOnOneWalks": pages_data.get("oneOnOneWalks", {}),
        "pages.puppyVisits": pages_data.get("puppyVisits", {}),
        "pages.pricing": pages_data.get("pricing", {}),
        "pages.about": pages_data.get("about", {}),
        "pages.contact": pages_data.get("contact", {}),
    }
    engine = _make_engine(settings)
    # Ensure tables exist before upserting.
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    Session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        BarColumn(),
        TaskProgressColumn(),
        console=console,
    ) as progress:
        task = progress.add_task("Upserting sections...", total=len(sections))
        results = []
        async with Session() as session:
            for key, data in sections.items():
                existing = await session.execute(
                    select(ContentSection).where(ContentSection.key == key)
                )
                row = existing.scalar_one_or_none()
                if row is None:
                    session.add(ContentSection(key=key, data=data))
                    results.append(("[green]inserted[/green]", key))
                else:
                    row.data = data
                    results.append(("[cyan]updated[/cyan] ", key))
                progress.advance(task)
            await session.commit()
    await engine.dispose()
    # Summary table of what was inserted vs. updated.
    table = Table(box=box.SIMPLE, show_header=True, border_style="dim")
    table.add_column("Status", width=10)
    table.add_column("Section key")
    for status, key in results:
        table.add_row(status, key)
    console.print()
    console.print(table)
    console.print(f"[bold green]✓ Done[/bold green] — {len(results)} sections seeded\n")
# ── status ────────────────────────────────────────────────────────────────────
@app.command()
def status():
    """
    [bold]Show database connection status and table row counts.[/bold]
    Verifies the connection and prints a summary of all CMS data currently stored.
    """
    _print_banner()
    # All DB work is async; drive it to completion on a fresh event loop.
    asyncio.run(_status_async())
async def _status_async():
    """
    Check database connectivity and print a summary of stored CMS data.

    Probes the connection with ``SELECT 1``; on failure prints an error panel
    and exits with code 1. On success prints per-table row counts and the list
    of content-section keys with their last-updated timestamps.

    Raises:
        typer.Exit: with code 1 when the database is unreachable.
    """
    settings = _load_settings()
    # Only the names used below are imported; the engine itself comes from
    # _make_engine, so create_async_engine is not needed here.
    from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession
    from sqlalchemy import select, func, text
    from app.models import User, Page, BlogPost, SiteSettings, ContentSection
    # Show only the part after '@' so credentials never reach the terminal.
    db_display = settings.DATABASE_URL.split("@")[-1] if "@" in settings.DATABASE_URL else settings.DATABASE_URL
    engine = _make_engine(settings)
    Session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
    err = ""  # populated only when the connection probe below fails
    with Progress(SpinnerColumn(), TextColumn("Connecting to database..."), console=console) as p:
        p.add_task("", total=None)
        try:
            async with Session() as session:
                await session.execute(text("SELECT 1"))
                connected = True
        except Exception as e:
            connected = False
            err = str(e)
    if not connected:
        console.print(Panel(
            f"[bold red]✗ Cannot connect[/bold red]\n[dim]{db_display}[/dim]\n\n[red]{err}[/red]",
            title="Database Status",
            border_style="red",
        ))
        raise typer.Exit(1)
    # Row counts
    async with Session() as session:
        counts = {}
        for label, model, filter_col in [
            ("Pages (total)", Page, None),
            ("Pages (published)", Page, Page.published == True),  # noqa: E712 — SQLAlchemy expression, not a Python comparison
            ("Blog Posts (total)", BlogPost, None),
            ("Blog Posts (pub.)", BlogPost, BlogPost.published == True),  # noqa: E712
            ("Site Settings", SiteSettings, None),
            ("CMS Users", User, None),
            ("Content Sections", ContentSection, None),
        ]:
            stmt = select(func.count()).select_from(model)
            if filter_col is not None:
                stmt = stmt.where(filter_col)
            result = await session.execute(stmt)
            counts[label] = result.scalar_one()
        # Section keys
        sections_result = await session.execute(
            select(ContentSection.key, ContentSection.updated_at).order_by(ContentSection.key)
        )
        section_rows = sections_result.all()
    await engine.dispose()
    # ── Connection panel ─────────────────────────────────────────────────────
    conn_info = Table.grid(padding=(0, 2))
    conn_info.add_column(style="dim")
    conn_info.add_column(style="bold")
    conn_info.add_row("Status", "[bold green]● Connected[/bold green]")
    conn_info.add_row("Database", db_display)
    console.print(Panel(conn_info, title="[bold green]Database Connection[/bold green]", border_style="green"))
    console.print()
    # ── Row counts ───────────────────────────────────────────────────────────
    counts_table = Table(box=box.ROUNDED, title="[bold]Table Row Counts[/bold]", border_style="cyan")
    counts_table.add_column("Table / Filter", style="cyan")
    counts_table.add_column("Rows", justify="right", style="bold white")
    for label, count in counts.items():
        # Dim out empty tables so populated ones stand out.
        style = "green" if count > 0 else "dim"
        counts_table.add_row(label, f"[{style}]{count}[/{style}]")
    console.print(counts_table)
    console.print()
    # ── Section keys ─────────────────────────────────────────────────────────
    if section_rows:
        sec_table = Table(box=box.SIMPLE, title="[bold]Content Sections[/bold]", border_style="dim")
        sec_table.add_column("Key", style="cyan")
        sec_table.add_column("Last Updated", style="dim")
        for key, updated_at in section_rows:
            sec_table.add_row(key, updated_at.strftime("%Y-%m-%d %H:%M") if updated_at else "")
        console.print(sec_table)
        console.print()
# ── create-admin ──────────────────────────────────────────────────────────────
@app.command(name="create-admin")
def create_admin():
    """
    [bold]Interactively create a new CMS admin user.[/bold]
    Prompts for email and password. Password is bcrypt-hashed before storage.
    """
    _print_banner()
    console.print(Panel("[bold]Create Admin User[/bold]", border_style="cyan"))
    email = Prompt.ask("[cyan]Email address[/cyan]")
    password = Prompt.ask("[cyan]Password[/cyan]", password=True)
    confirmation = Prompt.ask("[cyan]Confirm password[/cyan]", password=True)
    # Validate locally before touching the database.
    if confirmation != password:
        console.print("[bold red]✗ Passwords do not match[/bold red]")
        raise typer.Exit(1)
    if len(password) < 8:
        console.print("[bold red]✗ Password must be at least 8 characters[/bold red]")
        raise typer.Exit(1)
    asyncio.run(_create_admin_async(email, password))
async def _create_admin_async(email: str, password: str):
    """
    Create a CMS admin user with the given credentials.

    The password is hashed via app.auth.password before storage; the plain
    text never reaches the database.

    Args:
        email: Email address for the new admin account.
        password: Plain-text password (already validated by the caller).

    Raises:
        typer.Exit: with code 1 when a user with this email already exists.
    """
    settings = _load_settings()
    # Only the names used below are imported; the engine itself comes from
    # _make_engine, so create_async_engine is not needed here.
    from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession
    from sqlalchemy import select
    from app.models import User
    from app.auth.password import hash_password
    engine = _make_engine(settings)
    Session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
    async with Session() as session:
        existing = await session.execute(select(User).where(User.email == email))
        if existing.scalar_one_or_none() is not None:
            console.print(f"[bold yellow]⚠ User already exists:[/bold yellow] {email}")
            await engine.dispose()
            raise typer.Exit(1)
        user = User(email=email, hashed_password=hash_password(password), is_active=True)
        session.add(user)
        await session.commit()
        # Refresh to pull DB-generated fields (e.g. the primary key) for display.
        await session.refresh(user)
    await engine.dispose()
    console.print("\n[bold green]✓ Admin user created[/bold green]")
    info = Table.grid(padding=(0, 2))
    info.add_column(style="dim")
    info.add_column(style="bold cyan")
    info.add_row("Email", email)
    info.add_row("ID", str(user.id))
    info.add_row("Active", "[green]yes[/green]")
    console.print(Panel(info, border_style="green"))
    console.print()
# ── routes ────────────────────────────────────────────────────────────────────
@app.command()
def routes():
    """
    [bold]List all registered API routes.[/bold]
    Displays method, path, and handler name for every endpoint.
    """
    _print_banner()
    from app.main import app as fastapi_app
    table = Table(box=box.ROUNDED, title="[bold]Registered API Routes[/bold]", border_style="cyan")
    table.add_column("Methods", style="bold yellow", width=20)
    table.add_column("Path", style="cyan")
    table.add_column("Handler", style="dim")
    table.add_column("Auth", justify="center", width=6)
    # Rich markup colors per HTTP method; unknown methods fall back to white.
    METHOD_COLORS = {
        "GET": "green",
        "POST": "yellow",
        "PUT": "blue",
        "DELETE": "red",
        "PATCH": "magenta",
    }
    route_rows = []
    for route in fastapi_app.routes:
        # Skip mounts / websocket routes that expose no HTTP methods.
        if not hasattr(route, "methods"):
            continue
        methods = sorted(route.methods or [])
        path = route.path
        handler = route.endpoint.__name__ if hasattr(route, "endpoint") else ""
        # Detect auth by checking if get_current_user appears in the route's
        # declared dependencies. NOTE(review): this misses dependencies declared
        # only in the endpoint signature — confirm whether that matters here.
        deps = getattr(route, "dependencies", [])
        auth_required = any("get_current_user" in str(d) for d in deps)
        method_str = " ".join(
            f"[{METHOD_COLORS.get(m, 'white')}]{m}[/{METHOD_COLORS.get(m, 'white')}]"
            for m in methods
        )
        route_rows.append((path, method_str, handler, auth_required))

    def sort_key(row):
        """Sort /api/v1 paths first, then /api, then everything else."""
        p = row[0]
        if p.startswith("/api/v1"):
            return (0, p)
        if p.startswith("/api"):
            return (1, p)
        return (2, p)

    route_rows.sort(key=sort_key)
    for path, method_str, handler, auth in route_rows:
        auth_icon = "[yellow]🔒[/yellow]" if auth else ""
        table.add_row(method_str, path, handler, auth_icon)
    console.print(table)
    console.print(f"\n[dim]Total routes: {len(route_rows)}[/dim]\n")
# ── Interactive shell ─────────────────────────────────────────────────────────
# Interactive-shell menu entries as (hotkey, command name, description) rows;
# order here is the display order used by _print_menu.
MENU_ITEMS = [
    ("1", "dev", "Start the FastAPI dev server [dim](background thread)[/dim]"),
    ("2", "migrate", "Run Alembic migrations [dim](upgrade head)[/dim]"),
    ("3", "seed", "Seed admin user + all content"),
    ("4", "seed-content", "Re-seed content.json sections only"),
    ("5", "status", "Database connection + row counts"),
    ("6", "create-admin", "Create a new CMS admin user"),
    ("7", "routes", "List all registered API routes"),
    ("q", "quit", "Exit the CLI"),
]
# Tracks whether the dev server background thread is running
# (set by _run_dev_in_background; stays None until the server first starts).
_server_thread = None
def _print_menu():
    """Render the numbered command menu inside a green panel."""
    table = Table(box=box.SIMPLE, show_header=False, padding=(0, 2), border_style="dim")
    table.add_column(style="bold cyan", width=4)
    table.add_column(style="bold white", width=18)
    table.add_column()
    # `command` was previously bound to `_`, which conventionally marks an
    # unused value — it is displayed, so give it a real name.
    for key, command, description in MENU_ITEMS:
        table.add_row(f"[{key}]", command, description)
    console.print(Panel(table, title="[bold green]Goodwalk CLI — What would you like to do?[/bold green]", border_style="green"))
def _run_dev_in_background(host: str, port: int):
    """Start Uvicorn in a daemon thread so the prompt stays live."""
    import threading
    import uvicorn
    global _server_thread
    already_running = _server_thread is not None and _server_thread.is_alive()
    if already_running:
        console.print("[yellow]⚠ Dev server is already running[/yellow]")
        return
    uv_config = uvicorn.Config("app.main:app", host=host, port=port, reload=False, log_level="warning", access_log=False)
    uv_server = uvicorn.Server(uv_config)

    def _serve():
        # Server.serve is a coroutine — give it its own event loop in the thread.
        asyncio.run(uv_server.serve())

    _server_thread = threading.Thread(target=_serve, daemon=True, name="uvicorn")
    _server_thread.start()
    console.print(f"\n[bold green]✓ Dev server started[/bold green] → [cyan]http://{host}:{port}[/cyan] | docs: [cyan]http://{host}:{port}/docs[/cyan]")
    console.print("[dim]Running in background — use [q] to exit the CLI (server stops with it)[/dim]\n")
def _dispatch(choice: str):
    """Run the selected menu command without exiting the shell.

    Accepts either the menu hotkey ("1".."7", "q") or the command name;
    unknown input prints a hint instead of raising.
    """
    choice = choice.strip().lower()
    if choice in ("1", "dev"):
        # Load settings for their side effects before starting the server;
        # the returned object itself was never used here.
        _load_settings()
        _run_dev_in_background("127.0.0.1", 8000)
    elif choice in ("2", "migrate"):
        import subprocess
        autogen = Confirm.ask("Generate a new migration? (no = upgrade head)", default=False)
        if autogen:
            msg = Prompt.ask("Migration message", default="auto")
            cmd = [sys.executable, "-m", "alembic", "revision", "--autogenerate", "-m", msg]
        else:
            cmd = [sys.executable, "-m", "alembic", "upgrade", "head"]
        with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=console) as p:
            p.add_task("Running migration...", total=None)
            result = subprocess.run(cmd, capture_output=True, text=True)
        if result.returncode == 0:
            console.print("[bold green]✓ Migration complete[/bold green]")
            if result.stdout.strip():
                console.print(f"[dim]{result.stdout.strip()}[/dim]")
        else:
            console.print(f"[bold red]✗ Migration failed[/bold red]\n[red]{result.stderr.strip()}[/red]")
    elif choice in ("3", "seed"):
        asyncio.run(_seed_async())
    elif choice in ("4", "seed-content"):
        asyncio.run(_seed_content_async())
    elif choice in ("5", "status"):
        asyncio.run(_status_async())
    elif choice in ("6", "create-admin"):
        # Inline prompt flow mirrors the create-admin CLI command.
        console.print(Panel("[bold]Create Admin User[/bold]", border_style="cyan"))
        email = Prompt.ask("[cyan]Email address[/cyan]")
        password = Prompt.ask("[cyan]Password[/cyan]", password=True)
        confirm = Prompt.ask("[cyan]Confirm password[/cyan]", password=True)
        if password != confirm:
            console.print("[bold red]✗ Passwords do not match[/bold red]")
        elif len(password) < 8:
            console.print("[bold red]✗ Password must be at least 8 characters[/bold red]")
        else:
            asyncio.run(_create_admin_async(email, password))
    elif choice in ("7", "routes"):
        routes()
    elif choice in ("q", "quit", "exit"):
        console.print("\n[dim]Goodbye 👋[/dim]\n")
        raise SystemExit(0)
    else:
        console.print(f"[yellow]Unknown command:[/yellow] [bold]{choice}[/bold] — enter a number or letter from the menu")
@app.command()
def shell():
    """
    [bold]Launch the interactive shell.[/bold]
    Presents a menu after each command so you can keep working without restarting.
    The dev server runs in a background thread — the prompt stays live.
    """
    _print_banner()
    console.print("[dim]Type a number, command name, or [bold]q[/bold] to quit. Ctrl+C also exits.[/dim]\n")
    # REPL loop: show menu, read a choice, dispatch, repeat until quit.
    while True:
        _print_menu()
        try:
            selection = Prompt.ask("[bold green]>[/bold green]", default="").strip()
        except (KeyboardInterrupt, EOFError):
            console.print("\n[dim]Goodbye 👋[/dim]\n")
            break
        if not selection:
            continue
        console.print()
        try:
            _dispatch(selection)
        except SystemExit:
            break
        except Exception as exc:
            console.print(f"[bold red]✗ Error:[/bold red] {exc}")
        console.print()
# ── Entry point ───────────────────────────────────────────────────────────────
if __name__ == "__main__":
    # Invoked with no arguments → default to the interactive shell command.
    if len(sys.argv) < 2:
        sys.argv.append("shell")
    app()
+461
View File
@@ -0,0 +1,461 @@
{
"siteSettings": {
"siteName": "Goodwalk",
"tagline": "Unleashing Fun in Your Dog's Day!",
"logo": {
"type": "image",
"src": "/images/logo-v6.png",
"alt": "Goodwalk",
"text": "Goodwalk"
},
"contact": {
"phone": "(022) 642 1011",
"email": "info@goodwalk.co.nz",
"address": "Auckland Central, New Zealand"
},
"social": {
"instagram": "https://www.instagram.com/goodwalk.nz",
"facebook": "https://www.facebook.com/goodwalk.nz",
"google": "https://g.page/goodwalk"
},
"seo": {
"defaultTitle": "Goodwalk Auckland Dog Walking Service",
"defaultDescription": "Trusted, professional dog walking services across Auckland Central. Pack walks, 1:1 walks and puppy visits."
}
},
"navigation": {
"items": [
{ "label": "Home", "url": "/" },
{ "label": "Pack Walks", "url": "/pack-walks" },
{ "label": "1:1 Walks", "url": "/1-1-walks" },
{ "label": "Puppy Visits", "url": "/puppy-visits" },
{ "label": "Our Pricing", "url": "/our-pricing" },
{ "label": "About Us", "url": "/about" },
{ "label": "Contact Us", "url": "/contact" }
]
},
"footer": {
"headline": "Let's get started!",
"contact": {
"email": "info@goodwalk.co.nz",
"phone": "(022) 642 1011"
},
"links": [
{ "label": "Home", "url": "/" },
{ "label": "Pack Walks", "url": "/pack-walks" },
{ "label": "1:1 Walks", "url": "/1-1-walks" },
{ "label": "Puppy Visits", "url": "/puppy-visits" },
{ "label": "Our Pricing", "url": "/our-pricing" },
{ "label": "Contact Us", "url": "/contact" }
],
"legalLinks": [
{ "label": "Terms & Conditions", "url": "/terms" },
{ "label": "Privacy", "url": "/privacy" }
],
"social": {
"instagram": "https://www.instagram.com/goodwalk.nz",
"facebook": "https://www.facebook.com/goodwalk.nz",
"google": "https://g.page/goodwalk"
},
"copyright": "© 2026 Goodwalk"
},
"pages": {
"home": {
"hero": {
"heading": "Unleashing Fun in Your Dog's Day!",
"subheading": "Unleashed fun in your dogs day!",
"buttons": [
{ "label": "Learn more", "url": "/pack-walks", "style": "solid" },
{ "label": "Enroll today", "url": "/contact", "style": "outline" }
]
},
"intro": {
"body": "Goodwalk delivers trusted, professional dog walking services across Auckland Central.",
"reviewsLink": {
"label": "All 5 star reviews on Google!",
"url": "https://g.page/goodwalk"
}
},
"happyPets": {
"heading": "Happy pets, happy humans",
"body": "Offering tailored pack walks for small & medium dogs, and one-on-one walks for large breeds! Our walkers give personalized attention to each dog, easing stress, anxiety and ensuring a quality experience. Our expertise in small-medium breeds ensures tailored care for their unique needs. Join our TINY GANG!",
"button": { "label": "Book now", "url": "/contact" }
},
"services": {
"heading": "What we do",
"items": [
{
"title": "Pack Walks",
"url": "/pack-walks",
"image": "/images/IMG_1226.jpg"
},
{
"title": "1:1 Walks",
"url": "/1-1-walks",
"image": "/images/one-on-one-walks-stella-rotated-e1711966505513.jpg"
},
{
"title": "Puppy Visits",
"url": "/puppy-visits",
"image": "/images/puppy-visits-splash-image.jpg"
}
]
},
"values": {
"heading": "Where dogs come first",
"items": [
{
"title": "Kindness",
"description": "With gentle care and genuine affection, we make every walk a calm, happy experience. We use positive reinforcement to encourage good behavior—because kindness is at the heart of everything we do."
},
{
"title": "Daily Updates",
"description": "Catch your pup in action with daily social updates—showcasing their walks, playtime, and mischief with the Tiny Gang. It's your window into their happiest moments."
},
{
"title": "Small Pack Sizes",
"description": "With just 48 dogs per group, our walks are calm, controlled, and respectful of public spaces—ensuring every dog gets the attention and care they deserve."
},
{
"title": "Safety",
"description": "Our team is fully pet first aid certified and trained to handle any situation calmly and confidently. With proactive safety protocols and constant situational awareness, we create a secure environment for every walk."
},
{
"title": "Flexibility",
"description": "We know life gets busy—so while we specialize in regular, permanent walks, we're always happy to adapt. Just give us a little notice, and we'll do our best to accommodate your changing schedule."
},
{
"title": "Reliability",
"description": "We guarantee punctuality and consistency, so you can count on us. With clear communication, you'll always be in the loop—and your dog's needs will always be our top priority."
}
]
},
"testimonials": {
"heading": "Why people choose us!",
"items": [
{
"quote": "Love Aless! She is so amazing with my slightly hyper and anxious dog. She is great with communication if anything on either of our ends need to change. Archie love his walks, and I love the photos she posts of him.",
"name": "Kate",
"subtitle": "Archie's mum"
},
{
"quote": "GoodWalk was the best dog walking service for my little pooch ! Aless was very helpful - basically doubled as a second mum to Monty. She always provided feedback on his outings and assisted where possible with any additional training that she felt he could work on and made recommendations where necessary which i feel is what every dog mum wants and needs!",
"name": "Estelle",
"subtitle": "Monty's mum"
},
{
"quote": "Truly the best dog walker in Auckland! I feel so lucky to have found Aless and my little terrier Otis absolutely adores her. He enjoys his regular weekly walks and always comes back happy & tired. Love the updates on social media so I can see how my dog is enjoying his day! Aless makes logistics so easy too. Highly highly recommend, there's a reason she has 5 stars!",
"name": "Ross",
"subtitle": "Otis's Dad"
},
{
"quote": "Alessandra has been walking and spending time with my pup since she was 10 weeks old, coming over and doing puppy visits through to transitioning her to pack walks with her little doggo friends. I know Alassandra loves and cares for my dog as much as I do and my dog has a great time! Cant recommend enough",
"name": "Nina",
"subtitle": "Wallace's mum"
}
],
"reviewsLink": {
"label": "All 5 star Google reviews",
"url": "https://g.page/goodwalk"
}
},
"booking": {
"heading": "Let's meet!",
"subheading": "Ready to get started? Book your free, no-obligation Meet & Greet today — just enter your details below"
},
"locations": {
"heading": "Locations & Hours",
"areasHeading": "We cover most Auckland Central's suburbs",
"suburbs": [
"Morningside", "Kingsland", "Ponsonby", "Grey Lynn", "Mt Albert",
"Mt Eden", "Sandringham", "Mt Roskill", "Arch Hill", "Freemans Bay",
"Herne Bay", "Pt Chevalier", "Avondale", "Three Kings", "Hillsbrough",
"Eden Terrace", "Balmoral"
],
"nearbyNote": "If you live in a nearby suburb get in touch!",
"hoursHeading": "Opening hours",
"hours": "We operate from Monday to Friday from 8am to 4pm."
},
"faq": {
"heading": "FAQ's",
"items": [
{
"question": "What happens if the weather is bad?",
"answer": "We operate in all weather conditions, except when their is a danger to the dog's health & safety."
},
{
"question": "What requirements does my dog need to have?",
"answer": "All dogs onboarding with Goodwalk need to have a current Auckland Council dog registration, be up to date with it's vaccinations to ensure the health and safety of other dogs."
},
{
"question": "Can any dog go with your service?",
"answer": "All dogs that are onboarded with us must go through our screening process which includes a minimum of two assessment walks"
},
{
"question": "How does with work with payments?",
"answer": "All walks are paid for a week in advance, via invoice."
},
{
"question": "Do you have insurance cover or First Aid training?",
"answer": "All trainers are covered by public liablity insurance, and all trainers hold a current First Aid training certificate."
}
]
}
},
"packWalks": {
"hero": {
"heading": "Pack Walks",
"subheading": "Join our Tiny Gang!",
"body": "Fun, safe, and specially designed for little paws, these adventures help your dog build friendships and confidence in a calm, friendly group.\n\nWe only welcome sociable dogs, so every outing feels secure and stress-free. As small dog owners ourselves, we know just what it takes to help your pup feel relaxed, happy, and right at home.\n\nJoin the Tiny Gang today—because your dog deserves more than just a walk. They deserve a tail-wagging good time!"
},
"tagline": "Goodwalk is the best choice for small and medium size dogs!",
"pricingIntro": "Our pack walks are a permanent booking of at least one walk day a week. Our Tiny Gang pack outing typically lasts 2 hours or more, including a one-hour walk at one of Auckland's scenic dog parks or beaches. Additionally, pick-up and drop-off services are provided for your convenience. We assist in reinforcing basic training, including recall, car manners, and leash etiquette. Gift your dog the best life!",
"plans": [
{
"name": "1 Walk",
"price": "$58",
"unit": "Per Walk",
"popular": false,
"features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"]
},
{
"name": "2-3 Walks",
"price": "$55",
"unit": "Per Walk",
"popular": false,
"features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"]
},
{
"name": "4-5 Walks",
"price": "$49.50",
"unit": "Per Walk",
"popular": true,
"features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"]
},
{
"name": "Casual Walk",
"price": "$65",
"unit": "Per Walk",
"popular": false,
"features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"]
}
],
"addons": [
{ "name": "Extra Dog (From same household)", "price": "$35" },
{ "name": "Muddy Wash", "price": "$35" },
{ "name": "5 Hour Day Out (Not suitable for all dogs)", "price": "$90" }
],
"benefits": [
{
"title": "Socialization with other dogs",
"description": "Tiny Gang pack walks help small and medium-sized dogs mingle and learn social skills from each other, boosting their confidence and positive behavior."
},
{
"title": "Tailored pace",
"description": "Our handlers can adjust the pace and intensity of the walk to suit the energy levels and abilities of small and medium-sized dogs, ensuring a pleasant and enjoyable experience for all participants."
},
{
"title": "Comfort",
"description": "Smaller groups create a more relaxed and comfortable atmosphere for dogs, allowing them to explore and enjoy the walk without feeling overwhelmed by larger dogs."
},
{
"title": "Increased bonding",
"description": "Tiny Gang pack walks foster stronger bonds between dogs and their walker, as well as between the dogs themselves, enhancing trust and companionship among the group."
},
{
"title": "Individualized attention",
"description": "Small pack sizes allow for more personalized care and attention from the walker, addressing the unique needs and preferences of small and medium-sized breeds."
},
{
"title": "Safety",
"description": "With a smaller group composed of dogs of similar sizes, there's reduced risk of accidental injury or intimidation, ensuring a safer walking environment."
}
],
"bookingHeading": "Join the Tiny Gang!"
},
"oneOnOneWalks": {
"hero": {
"heading": "1:1 Walks",
"subheading": "One-on-one attention for your dog",
"body": "Perfect for large breeds or dogs who prefer their own company. Our 1:1 walks give your dog undivided attention from an experienced handler in a focused, calm environment."
},
"plans": [
{
"name": "30 Minutes",
"price": "$45",
"unit": "Per Walk",
"popular": false,
"features": ["Free pickup/dropoff", "30 minute walk", "Social media updates", "Basic training"]
},
{
"name": "45 Minutes",
"price": "$55",
"unit": "Per Walk",
"popular": true,
"features": ["Free pickup/dropoff", "45 minute walk", "Social media updates", "Basic training"]
},
{
"name": "60 Minutes",
"price": "$65",
"unit": "Per Walk",
"popular": false,
"features": ["Free pickup/dropoff", "60 minute walk", "Social media updates", "Basic training"]
}
],
"bookingHeading": "Book a 1:1 Walk"
},
"puppyVisits": {
"hero": {
"heading": "Puppy Visits",
"subheading": "Introducing Puppy Visits: Building strong foundations for our pack walks!",
"body": "We love puppies! Our puppy home visits are perfect for young pups not quite ready to join the pack and busy owners with hectic schedules. We lay the groundwork for future pack walks, including fun games, potty breaks, and even feeding if required. Let us help your furry friend thrive while you're away!"
},
"plans": [
{
"name": "20 Minutes",
"price": "$39",
"unit": "Per Visit",
"popular": false,
"features": ["Bathroom break", "Pet feed", "Basic training", "Enrichment games"]
},
{
"name": "45 Minutes",
"price": "$49",
"unit": "Per Visit",
"popular": false,
"features": ["Bathroom break", "Pet feed", "Basic training", "Enrichment games"]
},
{
"name": "1 Hour",
"price": "$55",
"unit": "Per Visit",
"popular": false,
"features": ["Bathroom break", "Pet feed", "Basic training", "Enrichment games"]
}
],
"benefits": [
{
"title": "Enrichment",
"description": "From stimulating games to sensory toys, we keep those curious minds engaged and little tails wagging."
},
{
"title": "Setting up the basics for pack walks",
"description": "Lay the groundwork for your pup's adult life. We'll guide you through setting the right tone, offering basic training tips and tricks along the way."
},
{
"title": "Reduce anxiety",
"description": "With time your pup will know when to expect a visit, reducing the chances of accidents while you're away. With regular visits, your pup will feel loved and secure, minimizing any time spent at home alone."
},
{
"title": "Expert advise",
"description": "As experienced dog pawrents, we've been through it all with many adorable puppies. Consider us your go-to for any questions or concerns as your furry friend grows up."
}
],
"bookingHeading": "Ready to join the Tiny Gang?"
},
"pricing": {
"heading": "Our Pricing",
"subheading": "Transparent pricing, no hidden fees.",
"sections": [
{
"service": "Pack Walks",
"url": "/pack-walks",
"plans": [
{ "name": "1 Walk", "price": "$58", "unit": "Per Walk", "popular": false, "features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"] },
{ "name": "2-3 Walks", "price": "$55", "unit": "Per Walk", "popular": false, "features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"] },
{ "name": "4-5 Walks", "price": "$49.50", "unit": "Per Walk", "popular": true, "features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"] },
{ "name": "Casual Walk", "price": "$65", "unit": "Per Walk", "popular": false, "features": ["Free pickup/dropoff", "1 hour adventure", "Social media updates", "Basic training"] }
]
},
{
"service": "1:1 Walks",
"url": "/1-1-walks",
"plans": [
{ "name": "30 Minutes", "price": "$45", "unit": "Per Walk", "popular": false, "features": ["Free pickup/dropoff", "30 minute walk", "Social media updates", "Basic training"] },
{ "name": "45 Minutes", "price": "$55", "unit": "Per Walk", "popular": true, "features": ["Free pickup/dropoff", "45 minute walk", "Social media updates", "Basic training"] },
{ "name": "60 Minutes", "price": "$65", "unit": "Per Walk", "popular": false, "features": ["Free pickup/dropoff", "60 minute walk", "Social media updates", "Basic training"] }
]
},
{
"service": "Puppy Visits",
"url": "/puppy-visits",
"plans": [
{ "name": "20 Minutes", "price": "$39", "unit": "Per Visit", "popular": false, "features": ["Bathroom break", "Pet feed", "Basic training", "Enrichment games"] },
{ "name": "45 Minutes", "price": "$49", "unit": "Per Visit", "popular": false, "features": ["Bathroom break", "Pet feed", "Basic training", "Enrichment games"] },
{ "name": "1 Hour", "price": "$55", "unit": "Per Visit", "popular": false, "features": ["Bathroom break", "Pet feed", "Basic training", "Enrichment games"] }
]
}
]
},
"about": {
"heading": "About us",
"sections": [
{
"title": "Who we are",
"body": "At GoodWalk, we're not your average dog walking service. We're a team of passionate dog lovers dedicated to providing top-notch care for your furry friends. Specializing in small dogs, we understand their unique needs firsthand, being small dog owners ourselves! Our commitment to excellence has quickly made us a leader in Auckland Central's dog-walking scene. From pack walks to one-on-one sessions, we ensure the happiness and well-being of every dog in our care."
},
{
"title": "Our impact",
"body": "At GoodWalk, we believe in positive reinforcement training to help your dog thrive in the world. Safety, professionalism, well-being, fun, structure, and compassion are the cornerstones of our business ethos. When you choose GoodWalk, you're choosing a partner who will treat your dog like family because that's exactly what they are to us."
}
],
"team": {
"heading": "Meet the team",
"members": [
{
"name": "Alessandra",
"role": "Founder",
"bio": "Behind GoodWalk is Alessandra, an Italian who has a deep passion for dogs. With her love for animals and years of experience, Alessandra leads our team with dedication and expertise, ensuring that every dog receives the love and attention they deserve."
},
{
"name": "Maya",
"role": "Marketing Manager",
"bio": "And let's not forget about Maya, our marketing manager! A Cavalier King Charles cross Shih Tzu, Maya is full of sass and personality, bringing a touch of charm and flair to everything we do."
}
]
}
},
"contact": {
"heading": "Booking",
"subheading": "Let's meet!",
"body": "Ready to get started? Book your free, no-obligation Meet & Greet today — just enter your details below",
"contact": {
"email": "info@goodwalk.co.nz",
"phone": "(022) 642 1011"
},
"formServices": ["Pack Walks", "1:1 Walks", "Homestays", "Puppy Visits", "Other Services"]
}
},
"testimonials": [
{
"quote": "Love Aless! She is so amazing with my slightly hyper and anxious dog. She is great with communication if anything on either of our ends need to change. Archie love his walks, and I love the photos she posts of him.",
"name": "Kate",
"subtitle": "Archie's mum"
},
{
"quote": "GoodWalk was the best dog walking service for my little pooch ! Aless was very helpful - basically doubled as a second mum to Monty. She always provided feedback on his outings and assisted where possible with any additional training that she felt he could work on and made recommendations where necessary which i feel is what every dog mum wants and needs!",
"name": "Estelle",
"subtitle": "Monty's mum"
},
{
"quote": "Truly the best dog walker in Auckland! I feel so lucky to have found Aless and my little terrier Otis absolutely adores her. He enjoys his regular weekly walks and always comes back happy & tired. Love the updates on social media so I can see how my dog is enjoying his day! Aless makes logistics so easy too. Highly highly recommend, there's a reason she has 5 stars!",
"name": "Ross",
"subtitle": "Otis's Dad"
},
{
"quote": "Alessandra has been walking and spending time with my pup since she was 10 weeks old, coming over and doing puppy visits through to transitioning her to pack walks with her little doggo friends. I know Alassandra loves and cares for my dog as much as I do and my dog has a great time! Cant recommend enough",
"name": "Nina",
"subtitle": "Wallace's mum"
}
]
}
+7
View File
@@ -0,0 +1,7 @@
[pytest]
asyncio_mode = auto
testpaths = tests
addopts = -v --tb=short
markers =
security: marks tests as security / hardening tests (OWASP ASVS / API Top 10)
members_admin: marks tests for the members area and admin-members endpoints
+21
View File
@@ -0,0 +1,21 @@
fastapi>=0.111.0
uvicorn[standard]>=0.29.0
sqlalchemy[asyncio]>=2.0.0
asyncpg>=0.29.0
alembic>=1.13.0
pydantic>=2.0.0
pydantic-settings>=2.0.0
python-jose[cryptography]>=3.3.0
bcrypt>=4.0.0
slowapi>=0.1.9
nh3>=0.2.14
python-multipart>=0.0.9
typer>=0.12.0
rich>=13.7.0
pytest>=8.0.0
pytest-asyncio>=0.23.0
pytest-cov>=5.0.0
httpx>=0.27.0
aiosqlite>=0.20.0
user-agents>=2.2.0
email-validator>=2.0.0
+96
View File
@@ -0,0 +1,96 @@
"""
Seed script — creates default admin user, sample page, sample blog post,
and site settings. Run once after running Alembic migrations.
Usage:
python seed.py
"""
import asyncio
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from sqlalchemy import select
from app.database import AsyncSessionLocal, engine
from app.models.base import Base
from app.models.user import User
from app.models.page import Page
from app.models.post import BlogPost
from app.models.settings import SiteSettings
from app.auth.password import hash_password
async def seed() -> None:
    """Create the default admin user, sample page, sample blog post and
    site settings.

    Idempotent: each record is inserted only when it does not already
    exist, so the script is safe to re-run. All inserts share a single
    session and are committed together at the end.
    """
    async with AsyncSessionLocal() as session:
        await _seed_admin_user(session)
        await _seed_home_page(session)
        await _seed_hello_world_post(session)
        await _seed_site_settings(session)
        await session.commit()
    print("\nSeed complete.")


async def _first(session, stmt):
    """Execute *stmt* and return the first scalar row, or None."""
    result = await session.execute(stmt)
    return result.scalars().first()


async def _seed_admin_user(session) -> None:
    """Insert the default admin account unless one already exists."""
    existing = await _first(
        session, select(User).where(User.email == "admin@example.com")
    )
    if existing is None:
        session.add(
            User(
                email="admin@example.com",
                hashed_password=hash_password("changeme123"),
                is_active=True,
            )
        )
        # Default credentials are printed deliberately for first-run setup;
        # they should be changed immediately after the first login.
        print("Created admin user: admin@example.com / changeme123")
    else:
        print("Admin user already exists, skipping.")


async def _seed_home_page(session) -> None:
    """Insert the sample 'home' page unless one already exists."""
    existing = await _first(session, select(Page).where(Page.slug == "home"))
    if existing is None:
        session.add(
            Page(
                title="Home",
                slug="home",
                published=True,
                body="<h1>Welcome</h1><p>This is the home page.</p>",
            )
        )
        print("Created sample page: Home")
    else:
        print("Home page already exists, skipping.")


async def _seed_hello_world_post(session) -> None:
    """Insert the sample 'hello-world' blog post unless one already exists."""
    existing = await _first(
        session, select(BlogPost).where(BlogPost.slug == "hello-world")
    )
    if existing is None:
        session.add(
            BlogPost(
                title="Hello World",
                slug="hello-world",
                published=True,
                author="Admin",
                excerpt="Our first post",
                body="<p>Welcome to our blog.</p>",
                tags=["news", "welcome"],
            )
        )
        print("Created sample blog post: Hello World")
    else:
        print("Hello World post already exists, skipping.")


async def _seed_site_settings(session) -> None:
    """Insert the singleton SiteSettings row unless one already exists."""
    existing = await _first(session, select(SiteSettings).limit(1))
    if existing is None:
        session.add(
            SiteSettings(
                site_name="Goodwalk",
                tagline="Dog walking in Auckland",
                logo_url="",
                footer_text="© 2025 Goodwalk",
                social_links={"facebook": "", "instagram": ""},
            )
        )
        print("Created site settings.")
    else:
        print("Site settings already exist, skipping.")
if __name__ == "__main__":
asyncio.run(seed())
+72
View File
@@ -0,0 +1,72 @@
"""
Seed content_sections table from data/content.json.
Run from the backend/ directory:
python seed_content.py
"""
import asyncio
import json
from pathlib import Path
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from app.config import settings
from app.models import Base, ContentSection
CONTENT_FILE = Path(__file__).parent / "data" / "content.json"
def build_sections(content: dict) -> dict:
    """Flatten raw content.json data into keyed content sections.

    Top-level keys map through unchanged; each known page under
    ``content["pages"]`` becomes a ``pages.<name>`` section. Any missing
    key defaults to an empty dict (empty list for ``testimonials``) so a
    partial content file still seeds every section.

    Args:
        content: Parsed content.json payload.

    Returns:
        Mapping of section key -> section data, in a stable order.
    """
    # Known page names, mirrored verbatim into "pages.<name>" keys.
    page_names = (
        "home",
        "packWalks",
        "oneOnOneWalks",
        "puppyVisits",
        "pricing",
        "about",
        "contact",
    )
    pages = content.get("pages", {})
    sections = {
        "siteSettings": content.get("siteSettings", {}),
        "navigation": content.get("navigation", {}),
        "footer": content.get("footer", {}),
        "testimonials": content.get("testimonials", []),
    }
    sections.update({f"pages.{name}": pages.get(name, {}) for name in page_names})
    return sections
async def seed() -> None:
    """Load content.json and upsert each section into content_sections.

    Rows are matched on ``ContentSection.key``: existing rows are updated
    in place, missing rows are inserted. Tables are created first so the
    script also works against an empty database.
    """
    # Hoisted out of the per-section loop below, where it was previously
    # re-executed on every iteration.
    from sqlalchemy import select

    if not CONTENT_FILE.exists():
        print(f"Content file not found: {CONTENT_FILE}")
        return

    with open(CONTENT_FILE, encoding="utf-8") as f:
        content = json.load(f)
    sections = build_sections(content)

    engine = create_async_engine(settings.DATABASE_URL, echo=False)
    async with engine.begin() as conn:
        # Ensure the schema exists before upserting.
        await conn.run_sync(Base.metadata.create_all)

    Session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
    async with Session() as session:
        for key, data in sections.items():
            result = await session.execute(
                select(ContentSection).where(ContentSection.key == key)
            )
            row = result.scalar_one_or_none()
            if row:
                row.data = data
                print(f" updated: {key}")
            else:
                session.add(ContentSection(key=key, data=data))
                print(f" inserted: {key}")
        await session.commit()

    await engine.dispose()
    print(f"\nSeeded {len(sections)} sections from {CONTENT_FILE}")
if __name__ == "__main__":
asyncio.run(seed())
View File
+139
View File
@@ -0,0 +1,139 @@
"""
Test configuration and shared fixtures.
Uses an in-memory SQLite async database to avoid needing PostgreSQL in CI.
The get_db dependency is overridden so all tests use the test database.
"""
import pytest
import pytest_asyncio
from httpx import AsyncClient, ASGITransport
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from typing import AsyncGenerator
from app.main import app
from app.database import get_db
from app.models.base import Base
from app.models import User, Page, BlogPost, SiteSettings, RefreshToken, Experiment, ExperimentVariant, ExperimentEvent # noqa: F401 register models
from app.models import Member, MemberVerificationCode, MemberRefreshToken, Walk, Booking, AdminMessage # noqa: F401 register member models
from app.models import ContactLead # noqa: F401 register contact lead model
from app.auth.password import hash_password
from app.services.experiments import sync_experiment_registry
# pytest-asyncio settings
pytest_plugins = ["pytest_asyncio"]

# In-memory SQLite for tests — no external PostgreSQL needed in CI.
TEST_DATABASE_URL = "sqlite+aiosqlite:///:memory:"

# One shared engine for the whole test session.
test_engine = create_async_engine(
    TEST_DATABASE_URL,
    # SQLite connections may be touched from different asyncio worker threads.
    connect_args={"check_same_thread": False},
    echo=False,
)

# Session factory used by fixtures and the get_db override.
# expire_on_commit=False keeps ORM objects readable after commit without
# triggering implicit refresh queries.
TestSessionLocal = async_sessionmaker(
    bind=test_engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autoflush=False,
    autocommit=False,
)
async def override_get_db() -> AsyncGenerator[AsyncSession, None]:
    """Dependency override that hands out a test-database session.

    Commits after the request body succeeds, rolls back (and re-raises)
    on failure, and always closes the session afterwards.
    """
    session = TestSessionLocal()
    try:
        yield session
        await session.commit()
    except Exception:
        await session.rollback()
        raise
    finally:
        await session.close()
@pytest_asyncio.fixture(scope="session", autouse=True)
async def setup_database():
    """Create all tables once per test session, drop them on teardown."""
    async with test_engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    # Seed the experiment registry so experiment endpoints have rows to serve.
    async with TestSessionLocal() as session:
        await sync_experiment_registry(session)
        await session.commit()
    yield
    # Teardown: drop the schema and release pooled connections.
    async with test_engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
    await test_engine.dispose()
@pytest_asyncio.fixture(autouse=True)
async def clean_tables():
    """Truncate tables between tests for isolation."""
    yield
    # Delete in reverse topological order so FK children go before parents.
    async with test_engine.begin() as conn:
        for table in reversed(Base.metadata.sorted_tables):
            await conn.execute(table.delete())
    # Re-seed the experiment registry that the wipe above just removed.
    async with TestSessionLocal() as session:
        await sync_experiment_registry(session)
        await session.commit()
@pytest.fixture(autouse=True)
def reset_rate_limiter():
    """Reset slowapi's in-memory rate-limit counters before each test.

    Without this, rapid sequential test runs exhaust the per-IP limits
    (e.g. 5/min on /auth/login) and cause cascading 429 errors that mask
    the actual behaviour under test.
    """
    from app.middleware.rate_limit import limiter
    # NOTE(review): _storage is a private slowapi/limits attribute —
    # re-verify this reset path when upgrading slowapi.
    limiter._storage.reset()
    yield
@pytest_asyncio.fixture
async def client() -> AsyncGenerator[AsyncClient, None]:
    """HTTP client bound to the FastAPI app with the test-DB dependency override."""
    app.dependency_overrides[get_db] = override_get_db
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as ac:
        yield ac
    # Remove the override so state does not leak into other tests.
    app.dependency_overrides.clear()
@pytest_asyncio.fixture
async def admin_user():
    """Create an admin user in the test database and return it."""
    async with TestSessionLocal() as session:
        user = User(
            email="admin@example.com",
            hashed_password=hash_password("testpassword"),
            is_active=True,
        )
        session.add(user)
        await session.commit()
        # Refresh so DB-generated fields (id, timestamps) are populated.
        await session.refresh(user)
        return user
@pytest_asyncio.fixture
async def admin_token(client: AsyncClient, admin_user: User) -> str:
    """Log in as the admin user and return the Bearer access token."""
    response = await client.post(
        "/api/v1/auth/login",
        json={"email": "admin@example.com", "password": "testpassword"},
    )
    # Fail fast with the response body if login breaks — every dependent
    # test would otherwise fail with an opaque KeyError below.
    assert response.status_code == 200, f"Login failed: {response.text}"
    return response.json()["access_token"]
@pytest_asyncio.fixture
async def db_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a live test-database session for direct state manipulation in tests.

    Useful when a test needs to insert or update rows outside the HTTP layer
    (e.g. marking a user inactive before testing the login rejection).
    Changes must be committed explicitly by the caller.
    """
    # Session closes automatically when the fixture generator is finalized.
    async with TestSessionLocal() as session:
        yield session
+328
View File
@@ -0,0 +1,328 @@
"""
Authentication security tests.
Control coverage
────────────────
OWASP ASVS v4.0 V2 Authentication Verification
V3 Session Management Verification
OWASP API Top 10 API2:2023 Broken Authentication
"""
import base64
import json
from datetime import timedelta
import pytest
from httpx import AsyncClient
from jose import jwt as jose_jwt
from sqlalchemy import update as sa_update
from app.auth.jwt import create_access_token
from app.models.user import User
pytestmark = pytest.mark.asyncio
# ── V2.1 Password security ───────────────────────────────────────────────────
class TestCredentialValidation:
    """ASVS V2.1 — Credential acceptance and rejection rules.

    All cases drive the public ``/api/v1/auth/login`` endpoint through the
    test client; the ``admin_user`` fixture provides a known-good account.
    """

    async def test_wrong_password_returns_401(self, client: AsyncClient, admin_user):
        """ASVS 2.1.1 | API2 — Incorrect password is rejected with 401."""
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "wrongpassword"},
        )
        assert resp.status_code == 401

    async def test_unknown_email_returns_401(self, client: AsyncClient):
        """ASVS 2.1.1 | API2 — Unregistered email returns 401, not 404.

        Returning 404 for an unknown email would allow attackers to enumerate
        registered accounts.
        """
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "ghost@example.com", "password": "anything"},
        )
        assert resp.status_code == 401

    async def test_empty_password_rejected(self, client: AsyncClient, admin_user):
        """ASVS 2.1.1 — Empty password string is rejected."""
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": ""},
        )
        # 401 when handled as a bad credential; 422 when schema validation
        # rejects the empty string first — both deny access.
        assert resp.status_code in (401, 422)

    async def test_null_password_rejected(self, client: AsyncClient, admin_user):
        """ASVS 2.1.1 — Null password field fails schema validation."""
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": None},
        )
        assert resp.status_code == 422

    async def test_missing_fields_rejected(self, client: AsyncClient):
        """ASVS 2.1.1 — Requests missing required auth fields return 422."""
        resp = await client.post("/api/v1/auth/login", json={})
        assert resp.status_code == 422

    async def test_very_long_password_handled_safely(self, client: AsyncClient, admin_user):
        """ASVS 2.1.7 — Passwords of 1 000+ characters must not cause a 500.

        bcrypt >= 4.0 raises ValueError('Password must be 72 bytes or fewer') when
        checkpw() is called with an oversized password. The app must catch this and
        return 401 rather than propagating a 500.
        """
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "x" * 1000},
        )
        assert resp.status_code == 401

    async def test_unicode_password_handled_safely(self, client: AsyncClient, admin_user):
        """ASVS 2.1.4 — Multi-byte / emoji passwords do not cause a 500."""
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "пароль🔑emoji"},
        )
        assert resp.status_code == 401

    async def test_inactive_account_rejected(
        self, client: AsyncClient, admin_user, db_session
    ):
        """ASVS 2.1.10 — Deactivated accounts cannot authenticate.

        is_active=False is the soft-disable mechanism; the login handler checks
        this after verifying the password.
        """
        # Flip the flag directly in the DB rather than through the API.
        await db_session.execute(
            sa_update(User)
            .where(User.id == admin_user.id)
            .values(is_active=False)
        )
        await db_session.commit()
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "testpassword"},
        )
        assert resp.status_code == 401

    async def test_error_response_does_not_enumerate_users(
        self, client: AsyncClient, admin_user
    ):
        """ASVS 2.2.2 | API2 — Bad password and unknown email return identical status codes.

        A differing status code (e.g. 404 vs 401) or error message leaks
        whether an address is registered, enabling user enumeration.
        """
        wrong_pass = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "wrong"},
        )
        no_user = await client.post(
            "/api/v1/auth/login",
            json={"email": "nobody@example.com", "password": "wrong"},
        )
        assert wrong_pass.status_code == no_user.status_code == 401
# ── V3.5 Token-based session management ─────────────────────────────────────
class TestJWTSecurity:
    """ASVS V3.5 | API2 — JWT access token validation controls.

    All cases hit a protected write endpoint (POST /api/v1/pages) with a
    deliberately defective credential and expect a 401/403 denial.
    """

    async def test_no_auth_header_rejected(self, client: AsyncClient):
        """ASVS 3.5.1 | API2 — Write endpoint with no Authorization header is denied."""
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
        )
        assert resp.status_code in (401, 403)

    async def test_garbage_bearer_token_rejected(self, client: AsyncClient):
        """ASVS 3.5.1 | API2 — Arbitrary string in Bearer position is rejected."""
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
            headers={"Authorization": "Bearer not-a-real-jwt"},
        )
        assert resp.status_code in (401, 403)

    async def test_wrong_signing_key_rejected(self, client: AsyncClient, admin_user):
        """ASVS 3.5.1 | API2 — JWT signed with a different secret is rejected.

        Tokens signed with a different key have a valid structure but fail
        signature verification against the server's SECRET_KEY.
        """
        fake_token = jose_jwt.encode(
            {"sub": str(admin_user.id)}, "wrong-secret", algorithm="HS256"
        )
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
            headers={"Authorization": f"Bearer {fake_token}"},
        )
        assert resp.status_code in (401, 403)

    async def test_expired_access_token_rejected(self, client: AsyncClient, admin_user):
        """ASVS 3.5.2 | API2 — Expired JWT is rejected even if the signature is valid.

        An expired token is issued with expires_delta in the past (-1 s), so
        the 'exp' claim is already exceeded at the time of the request.
        """
        expired = create_access_token(
            data={"sub": str(admin_user.id)},
            expires_delta=timedelta(seconds=-1),
        )
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
            headers={"Authorization": f"Bearer {expired}"},
        )
        assert resp.status_code in (401, 403)

    async def test_alg_none_attack_rejected(self, client: AsyncClient, admin_user):
        """ASVS 3.5.3 | API2 — JWT with algorithm 'none' (unsigned) is rejected.

        The 'alg: none' attack tricks vulnerable verifiers into accepting
        unsigned tokens. python-jose rejects them when a key is expected.
        The unsigned token is constructed manually to avoid library restrictions.
        """
        header_b64 = base64.urlsafe_b64encode(
            b'{"alg":"none","typ":"JWT"}'
        ).rstrip(b"=").decode()
        payload_b64 = base64.urlsafe_b64encode(
            json.dumps({"sub": str(admin_user.id)}).encode()
        ).rstrip(b"=").decode()
        # Trailing dot with empty signature segment = unsigned token.
        none_token = f"{header_b64}.{payload_b64}."
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
            headers={"Authorization": f"Bearer {none_token}"},
        )
        assert resp.status_code in (401, 403)

    async def test_tampered_payload_rejected(self, client: AsyncClient, admin_user):
        """ASVS 3.5.1 | API2 — JWT with a replaced payload but original signature is rejected.

        The signature covers the original header+payload. Swapping the payload
        invalidates the signature, so the token must be rejected even though
        the signature portion itself is a valid HMAC.
        """
        valid_token = create_access_token(data={"sub": str(admin_user.id)})
        header, _, signature = valid_token.split(".")
        fake_payload = base64.urlsafe_b64encode(
            json.dumps({"sub": "00000000-0000-0000-0000-000000000000"}).encode()
        ).rstrip(b"=").decode()
        tampered = f"{header}.{fake_payload}.{signature}"
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
            headers={"Authorization": f"Bearer {tampered}"},
        )
        assert resp.status_code in (401, 403)

    async def test_token_in_query_string_rejected(self, client: AsyncClient, admin_user):
        """ASVS 3.5.1 | API2 — Bearer token passed as a query parameter is rejected.

        Tokens in URLs appear in server logs, browser history, and Referer
        headers, making them trivially leakable. Only the Authorization header
        is accepted.
        """
        valid_token = create_access_token(data={"sub": str(admin_user.id)})
        resp = await client.post(
            f"/api/v1/pages?token={valid_token}",
            json={"title": "x", "slug": "x", "body": "x"},
        )
        assert resp.status_code in (401, 403)

    async def test_basic_auth_scheme_rejected(self, client: AsyncClient, admin_user):
        """ASVS 3.5.1 | API2 — HTTP Basic Auth scheme is not accepted; Bearer is required."""
        # Uses the module-level `base64` import; a redundant local
        # `import base64 as b64` previously shadowed it.
        credentials = base64.b64encode(b"admin@example.com:testpassword").decode()
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "x", "slug": "x", "body": "x"},
            headers={"Authorization": f"Basic {credentials}"},
        )
        assert resp.status_code in (401, 403)
# ── V3.5 Refresh token rotation and revocation ───────────────────────────────
class TestRefreshTokenSecurity:
    """ASVS V3.5 | API2 — Refresh token single-use rotation and revocation.

    Tokens are obtained through a real login and exchanged via
    ``/api/v1/auth/refresh``.
    """

    async def test_refresh_token_is_rotated_and_old_token_revoked(
        self, client: AsyncClient, admin_user
    ):
        """ASVS 3.5.2 — After rotation the original refresh token cannot be reused.

        The server performs one-time-use rotation: on each /auth/refresh call the
        presented token is revoked atomically and a new pair is issued. Presenting
        the old token a second time must return 401.
        """
        login = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "testpassword"},
        )
        assert login.status_code == 200
        original_refresh = login.json()["refresh_token"]
        # First exchange: must succeed and issue a *different* refresh token.
        rotate = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": original_refresh},
        )
        assert rotate.status_code == 200
        assert rotate.json()["refresh_token"] != original_refresh
        # Second exchange with the already-rotated token: must be denied.
        reuse = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": original_refresh},
        )
        assert reuse.status_code == 401

    async def test_forged_refresh_token_rejected(self, client: AsyncClient):
        """ASVS 3.5.2 | API2 — A randomly-generated string is not a valid refresh token."""
        resp = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": "totally-made-up-random-value"},
        )
        assert resp.status_code == 401

    async def test_empty_refresh_token_rejected(self, client: AsyncClient):
        """ASVS 3.5.2 — Empty refresh token string is rejected."""
        resp = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": ""},
        )
        # 401 if treated as an unknown token, 422 if schema rejects it first.
        assert resp.status_code in (401, 422)

    async def test_new_access_token_is_functional(
        self, client: AsyncClient, admin_user
    ):
        """ASVS 3.5.2 — Access token issued after a refresh is accepted by protected endpoints."""
        login = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "testpassword"},
        )
        original_refresh = login.json()["refresh_token"]
        rotate = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": original_refresh},
        )
        new_access = rotate.json()["access_token"]
        # The refreshed access token must authorize a real write operation.
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "Post-refresh", "slug": "post-refresh", "body": "<p>ok</p>"},
            headers={"Authorization": f"Bearer {new_access}"},
        )
        assert resp.status_code == 201
+195
View File
@@ -0,0 +1,195 @@
"""
Authorization and access-control security tests.
Control coverage
────────────────
OWASP ASVS v4.0 V4 Access Control Verification
OWASP API Top 10 API1:2023 Broken Object Level Authorization (BOLA)
API3:2023 Broken Object Property Level Authorization
API5:2023 Broken Function Level Authorization
"""
import pytest
from httpx import AsyncClient
pytestmark = pytest.mark.asyncio
# ── V4.1 General access control ─────────────────────────────────────────────
class TestUnauthenticatedWriteAccess:
    """ASVS V4.1 | API5 — Every write/mutate endpoint denies unauthenticated callers."""

    @pytest.mark.parametrize("method,path,body", [
        ("POST", "/api/v1/pages", {"title": "T", "slug": "s", "body": "b"}),
        ("PUT", "/api/v1/pages/any-slug", {"title": "T"}),
        ("DELETE", "/api/v1/pages/any-slug", None),
        ("POST", "/api/v1/posts", {"title": "T", "slug": "s", "body": "b"}),
        ("PUT", "/api/v1/posts/any-slug", {"title": "T"}),
        ("DELETE", "/api/v1/posts/any-slug", None),
        ("PUT", "/api/v1/settings", {"site_name": "X"}),
        # Read-only but admin-only endpoint, so it belongs in this matrix too.
        ("GET", "/api/v1/analytics/summary", None),
    ])
    async def test_endpoint_requires_auth(
        self, client: AsyncClient, method: str, path: str, body: dict | None
    ):
        """ASVS 4.1.1 | API5 — {method} {path} returns 401/403 without credentials."""
        # Resolve the AsyncClient verb method by name (client.post, client.put, ...).
        fn = getattr(client, method.lower())
        kwargs = {"json": body} if body else {}
        resp = await fn(path, **kwargs)
        assert resp.status_code in (401, 403), (
            f"{method} {path}: expected 401/403 without auth, got {resp.status_code}"
        )

    async def test_malformed_auth_scheme_rejected(self, client: AsyncClient):
        """ASVS 4.1.1 | API2 — Non-Bearer Authorization schemes are denied."""
        bad_headers = [
            "Basic dXNlcjpwYXNz",  # HTTP Basic
            "Token some-api-key",  # Token scheme
            "Bearer",  # Missing credential
        ]
        for auth in bad_headers:
            resp = await client.post(
                "/api/v1/pages",
                json={"title": "x", "slug": "x", "body": "x"},
                headers={"Authorization": auth},
            )
            assert resp.status_code in (401, 403), (
                f"Expected 401/403 for Authorization: {auth!r}"
            )
# ── V4.2 Object-level authorization (BOLA) ──────────────────────────────────
class TestObjectLevelAuthorization:
    """ASVS V4.2 | API1 BOLA — Objects cannot be accessed or mutated without authorization."""

    async def test_nonexistent_page_returns_404_not_403(self, client: AsyncClient):
        """ASVS 4.2.1 | API1 — Missing resource returns 404, not 403 or 500.

        Returning 403 for non-existent resources would reveal that the resource
        exists but is protected; 404 is the correct public response.
        """
        resp = await client.get("/api/v1/pages/this-slug-does-not-exist-999")
        assert resp.status_code == 404

    async def test_path_traversal_in_slug_is_rejected(self, client: AsyncClient):
        """ASVS 4.2.1 | API1 — Path-traversal sequences in slug parameters are rejected.

        URL-encoded and plain traversal strings must not resolve to real resources
        or cause server errors.
        """
        traversal_slugs = [
            "../admin",
            "..%2fadmin",
            "%2e%2e/secret",
            "../../etc/passwd",
        ]
        for slug in traversal_slugs:
            resp = await client.get(f"/api/v1/pages/{slug}")
            # 404 (not found) or 422 (rejected by validation) are both safe.
            assert resp.status_code in (404, 422), (
                f"Unexpected {resp.status_code} for slug {slug!r}"
            )

    async def test_delete_nonexistent_resource_returns_404(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 4.2.1 | API1 — DELETE on an absent resource returns 404, not 204.

        Returning 204 for missing resources would silently confirm that the
        operation succeeded, masking business-logic gaps.
        """
        resp = await client.delete(
            "/api/v1/pages/genuinely-does-not-exist",
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 404

    async def test_update_nonexistent_resource_returns_404(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 4.2.1 | API1 — PUT on a missing slug returns 404."""
        resp = await client.put(
            "/api/v1/pages/genuinely-does-not-exist",
            json={"title": "Updated"},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 404
# ── V4.3 Mass assignment (object property authorization) ─────────────────────
class TestMassAssignment:
    """ASVS V4.3 | API3 — Server-side models reject undeclared or privileged fields."""

    async def test_extra_fields_in_create_are_silently_dropped(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 4.3.1 | API3 — Injected undeclared fields are not stored or echoed.

        Pydantic's schema strips fields not declared in PageCreate.
        The response must not contain 'is_admin', 'hashed_password', or any
        caller-supplied 'id'.
        """
        resp = await client.post(
            "/api/v1/pages",
            json={
                "title": "Mass-assign test",
                "slug": "mass-assign-create",
                "body": "<p>body</p>",
                "published": True,
                # Injected fields that must be dropped
                "is_admin": True,
                "hashed_password": "injected",
                "id": "00000000-0000-0000-0000-000000000001",
                "internal_field": "evil",
            },
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 201
        data = resp.json()
        assert "is_admin" not in data
        assert "hashed_password" not in data
        assert "internal_field" not in data
        # The server must assign its own id, ignoring the injected one.
        assert data.get("id") != "00000000-0000-0000-0000-000000000001"

    async def test_extra_fields_in_update_are_silently_dropped(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 4.3.1 | API3 — Injected fields in PUT body are stripped by the schema."""
        # Create the page first so the PUT below targets a real resource.
        await client.post(
            "/api/v1/pages",
            json={"title": "Base", "slug": "mass-assign-update", "body": "<p>b</p>"},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        resp = await client.put(
            "/api/v1/pages/mass-assign-update",
            json={"title": "Updated", "hacked_field": "injected", "is_admin": True},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 200
        data = resp.json()
        assert "hacked_field" not in data
        assert "is_admin" not in data

    async def test_published_flag_is_controlled_by_caller(
        self, client: AsyncClient, admin_token: str
    ):
        """API3 — The 'published' field is an intentional caller-controlled property.

        This test documents that any authenticated user can publish content.
        There is no role separation between 'editor' and 'publisher' roles.
        If RBAC is added in future, this test should be updated to reflect
        the intended access model.
        """
        resp = await client.post(
            "/api/v1/pages",
            json={
                "title": "Published Page",
                "slug": "published-by-caller",
                "body": "<p>visible</p>",
                "published": True,
            },
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 201
        assert resp.json()["published"] is True
+205
View File
@@ -0,0 +1,205 @@
"""
Security configuration, HTTP headers, CORS, and error-handling tests.
Control coverage
OWASP ASVS v4.0 V9 Communication Security
V14 Configuration Verification
OWASP API Top 10 API8:2023 Security Misconfiguration
API9:2023 Improper Inventory Management
"""
import pytest
from httpx import AsyncClient
pytestmark = pytest.mark.asyncio
# ── V14.4 HTTP security response headers ────────────────────────────────────
class TestSecurityHeaders:
    """ASVS V14.4 | API8 — Security response headers harden browser behaviour.

    These headers are typically applied by a middleware class or a reverse-proxy
    (e.g. nginx). All checks use the unauthenticated /health endpoint.
    """

    async def test_x_content_type_options_nosniff(self, client: AsyncClient):
        """ASVS 14.4.3 | API8 — X-Content-Type-Options: nosniff must be present."""
        resp = await client.get("/health")
        assert resp.headers.get("x-content-type-options") == "nosniff"

    async def test_x_frame_options_present(self, client: AsyncClient):
        """ASVS 14.4.5 | API8 — X-Frame-Options: DENY must be present."""
        resp = await client.get("/health")
        # Only presence is asserted; the exact value (DENY/SAMEORIGIN) is
        # left to the deployment configuration.
        assert resp.headers.get("x-frame-options") is not None

    async def test_content_security_policy_present(self, client: AsyncClient):
        """ASVS 14.4.6 | API8 — A Content-Security-Policy header must be present."""
        resp = await client.get("/health")
        assert "content-security-policy" in resp.headers

    async def test_strict_transport_security_present(self, client: AsyncClient):
        """ASVS 9.2.2 | API8 — Strict-Transport-Security must be present."""
        resp = await client.get("/health")
        assert "strict-transport-security" in resp.headers

    async def test_referrer_policy_present(self, client: AsyncClient):
        """ASVS 14.4.4 | API8 — Referrer-Policy must be present."""
        resp = await client.get("/health")
        assert "referrer-policy" in resp.headers
# ── V14.3 Error handling ─────────────────────────────────────────────────────
class TestErrorHandling:
    """ASVS V14.3 | API8 — Error responses contain no internal implementation details."""

    async def test_404_does_not_expose_internals(self, client: AsyncClient):
        """ASVS 14.3.2 — 404 for an unknown route contains no stack trace or file paths."""
        resp = await client.get("/api/v1/this-endpoint-does-not-exist-xyz")
        assert resp.status_code == 404
        body = resp.text
        assert "Traceback" not in body
        assert "site-packages" not in body
        assert "File /" not in body

    async def test_422_validation_error_returns_clean_json(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 14.3.2 | API8 — Validation failures return Pydantic's structured JSON, no stack trace."""
        # Deliberately wrong types for every field to force validation errors.
        resp = await client.post(
            "/api/v1/pages",
            json={"title": 99, "slug": [], "body": None},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 422
        data = resp.json()
        assert "detail" in data
        assert "Traceback" not in resp.text
        assert "site-packages" not in resp.text

    async def test_401_response_includes_www_authenticate(self, client: AsyncClient):
        """ASVS 3.5.1 | API2 — 401 from the auth layer includes WWW-Authenticate: Bearer."""
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "nobody@example.com", "password": "wrong"},
        )
        assert resp.status_code == 401
        # WWW-Authenticate is required by RFC 7235 for 401 responses
        assert "www-authenticate" in resp.headers

    async def test_malformed_json_returns_422_not_500(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 14.3.2 — Syntactically invalid JSON body returns 422, not 500."""
        resp = await client.post(
            "/api/v1/pages",
            content=b"{{not valid json at all{{",
            headers={
                "Authorization": f"Bearer {admin_token}",
                "Content-Type": "application/json",
            },
        )
        assert resp.status_code == 422
        assert resp.status_code != 500

    async def test_unexpected_content_type_handled(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 14.3.2 — Sending plain text to a JSON endpoint returns 422, not 500."""
        resp = await client.post(
            "/api/v1/pages",
            content=b"title=Test&slug=test&body=body",
            headers={
                "Authorization": f"Bearer {admin_token}",
                "Content-Type": "text/plain",
            },
        )
        # 415 (unsupported media type) or 422 (unparseable) are both acceptable.
        assert resp.status_code in (415, 422)
        assert resp.status_code != 500
# ── V14.5 CORS policy ────────────────────────────────────────────────────────
class TestCORSPolicy:
    """ASVS V14.5 | API8 — Cross-Origin Resource Sharing is restricted to declared origins."""

    async def test_allowed_origin_receives_acao_header(self, client: AsyncClient):
        """ASVS 14.5.2 — Preflight from an allowed origin gets the correct ACAO header."""
        # OPTIONS + Origin + Access-Control-Request-Method = CORS preflight.
        resp = await client.options(
            "/api/v1/pages",
            headers={
                "Origin": "http://localhost:5173",
                "Access-Control-Request-Method": "GET",
            },
        )
        acao = resp.headers.get("access-control-allow-origin", "")
        assert acao == "http://localhost:5173"

    async def test_disallowed_origin_does_not_receive_acao_header(
        self, client: AsyncClient
    ):
        """ASVS 14.5.2 | API8 — Preflight from an unknown origin is not granted cross-origin access.

        The ACAO header must not be echoed back for arbitrary origins, and must
        not be the wildcard '*', since credentials are enabled.
        """
        resp = await client.options(
            "/api/v1/pages",
            headers={
                "Origin": "https://evil.example.com",
                "Access-Control-Request-Method": "POST",
            },
        )
        acao = resp.headers.get("access-control-allow-origin", "")
        assert acao != "https://evil.example.com"
        assert acao != "*"

    async def test_production_origin_receives_acao_header(self, client: AsyncClient):
        """ASVS 14.5.2 — The production domain is in the CORS allowlist."""
        resp = await client.options(
            "/api/v1/pages",
            headers={
                "Origin": "https://www.goodwalk.co.nz",
                "Access-Control-Request-Method": "GET",
            },
        )
        acao = resp.headers.get("access-control-allow-origin", "")
        assert acao == "https://www.goodwalk.co.nz"
# ── API9 API inventory and documentation exposure ────────────────────────────
class TestAPIInventory:
    """ASVS V14 | API9:2023 — The API surface is intentional and known."""
    async def test_health_endpoint_returns_ok(self, client: AsyncClient):
        """Health check endpoint is reachable and returns structured JSON."""
        resp = await client.get("/health")
        assert resp.status_code == 200
        assert resp.json() == {"status": "ok"}
    async def test_openapi_schema_is_accessible(self, client: AsyncClient):
        """API9:2023 — OpenAPI schema is reachable (intentional; document as known exposure).
        In production this endpoint should be removed or IP-restricted.
        This test records the current state: it is publicly accessible.
        The schema must not contain connection strings or private server details.
        """
        resp = await client.get("/openapi.json")
        assert resp.status_code == 200
        schema = resp.json()
        assert "paths" in schema
        # Substring scan over the whole serialized schema, lowercased so the
        # checks are case-insensitive.
        schema_str = resp.text.lower()
        # Connection string or internal host details must not appear
        assert "postgresql" not in schema_str
        assert "asyncpg" not in schema_str
        assert "localhost:5432" not in schema_str
    async def test_swagger_ui_not_publicly_accessible_in_production(
        self, client: AsyncClient
    ):
        """API9:2023 — Interactive API documentation should not be public in production."""
        # 403 (blocked) or 404 (disabled) both satisfy the control.
        resp = await client.get("/docs")
        assert resp.status_code in (403, 404)
+328
View File
@@ -0,0 +1,328 @@
"""
Input validation and output sanitization security tests.
Control coverage
OWASP ASVS v4.0 V5 Input Validation, Sanitization and Encoding
OWASP API Top 10 API4:2023 Unrestricted Resource Consumption (payload size)
API8:2023 Security Misconfiguration (missing sanitization)
"""
import pytest
from httpx import AsyncClient
pytestmark = pytest.mark.asyncio
# ── V5.1 Input validation ────────────────────────────────────────────────────
class TestSchemaValidation:
    """ASVS V5.1 — All inputs are validated against declared schemas before processing."""
    async def test_required_fields_enforced_on_page_create(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 5.1.1 — Missing required fields return 422 Unprocessable Entity."""
        # slug and body are omitted on purpose; validation should reject the
        # request before any handler logic runs.
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "No slug or body"},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 422
    async def test_required_fields_enforced_on_post_create(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 5.1.1 — Blog post creation also enforces required fields."""
        resp = await client.post(
            "/api/v1/posts",
            json={"title": "No body or slug"},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 422
    async def test_null_body_field_rejected(self, client: AsyncClient, admin_token: str):
        """ASVS 5.1.1 — Explicit null for a required string field returns 422."""
        # Distinct from the omission case above: the key is present but null.
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "Test", "slug": "test-null-body", "body": None},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 422
    async def test_event_type_max_length_enforced(self, client: AsyncClient):
        """ASVS 5.1.3 | API4 — Analytics event_type over 64 chars returns 422."""
        # 65 chars = one past the documented 64-char limit (boundary test).
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "x" * 65, "page": "/", "session_id": "s1"},
        )
        assert resp.status_code == 422
    async def test_event_page_max_length_enforced(self, client: AsyncClient):
        """ASVS 5.1.3 | API4 — Analytics page field over 255 chars returns 422."""
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "x" * 256, "session_id": "s1"},
        )
        assert resp.status_code == 422
    async def test_event_element_max_length_enforced(self, client: AsyncClient):
        """ASVS 5.1.3 | API4 — Analytics element field over 255 chars returns 422."""
        resp = await client.post(
            "/api/web/event",
            json={
                "event_type": "click",
                "page": "/",
                "element": "x" * 256,
                "session_id": "s1",
            },
        )
        assert resp.status_code == 422
    async def test_malformed_json_body_returns_422_not_500(
        self, client: AsyncClient, admin_token: str
    ):
        """ASVS 5.1.1 — Malformed JSON body returns 422, not 500 Internal Server Error."""
        resp = await client.post(
            "/api/v1/pages",
            content=b"{not valid json{{",
            headers={
                "Authorization": f"Bearer {admin_token}",
                "Content-Type": "application/json",
            },
        )
        assert resp.status_code == 422
        # Redundant with the equality check; kept to state the property explicitly.
        assert resp.status_code != 500
# ── V5.2 Sanitization — stored XSS via HTML body ────────────────────────────
class TestHTMLSanitization:
    """ASVS V5.2 | API8 — HTML body is sanitized by nh3 before storage.
    Pages and blog posts accept rich HTML. nh3 (Rust/ammonia) strips disallowed
    elements and attributes before the content reaches the database. All XSS
    vectors tested here must be absent from the stored body.
    """
    async def _create_page_body(
        self, client: AsyncClient, token: str, slug: str, body: str
    ) -> str:
        # Shared helper: create a published page with the given raw HTML and
        # return the body as the API stored it (i.e. post-sanitization).
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "XSS test", "slug": slug, "body": body, "published": True},
            headers={"Authorization": f"Bearer {token}"},
        )
        assert resp.status_code == 201, f"Page create failed: {resp.text}"
        return resp.json()["body"]
    async def test_script_tag_stripped(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — <script> tags are removed from stored HTML."""
        body = await self._create_page_body(
            client, admin_token, "xss-script",
            '<p>Hello</p><script>alert("xss")</script>',
        )
        assert "<script" not in body
        # Also verifies the script *content* is dropped, not just the markup.
        assert "alert" not in body
    async def test_onerror_event_handler_stripped(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — onerror and other on* event attributes are removed."""
        body = await self._create_page_body(
            client, admin_token, "xss-onerror",
            '<img src="x" onerror="alert(1)">',
        )
        assert "onerror" not in body
    async def test_onclick_attribute_stripped(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — onclick event attribute is removed from anchor tags."""
        body = await self._create_page_body(
            client, admin_token, "xss-onclick",
            '<a href="/page" onclick="stealCookies()">Click</a>',
        )
        assert "onclick" not in body
    async def test_javascript_href_stripped(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — javascript: URI scheme in href is sanitized."""
        body = await self._create_page_body(
            client, admin_token, "xss-js-href",
            '<a href="javascript:alert(document.cookie)">Click</a>',
        )
        assert "javascript:" not in body
    async def test_iframe_stripped(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — <iframe> elements are removed entirely."""
        body = await self._create_page_body(
            client, admin_token, "xss-iframe",
            '<p>Content</p><iframe src="https://evil.example.com"></iframe>',
        )
        assert "<iframe" not in body
    async def test_object_tag_stripped(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — <object> elements (legacy plugin vector) are removed."""
        body = await self._create_page_body(
            client, admin_token, "xss-object",
            '<object data="data:text/html,<script>alert(1)</script>"></object>',
        )
        assert "<object" not in body
    async def test_safe_html_is_preserved(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — Legitimate formatting tags survive sanitization intact.
        Sanitization must not strip safe elements like <p>, <strong>, <em>,
        <ul>, <li>, or ordinary <a href> links.
        """
        safe = (
            "<p>Hello <strong>world</strong>. "
            "<a href='/about'>Learn more</a>.</p>"
            "<ul><li>Item one</li><li>Item two</li></ul>"
        )
        body = await self._create_page_body(client, admin_token, "xss-safe", safe)
        assert "<p>" in body
        assert "<strong>" in body
        # "<a" (no closing bracket) because the sanitizer may rewrite or
        # re-quote the href attribute while keeping the element.
        assert "<a" in body
        assert "<ul>" in body
    async def test_blog_post_body_sanitized(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — Blog post bodies go through the same nh3 sanitization."""
        resp = await client.post(
            "/api/v1/posts",
            json={
                "title": "XSS Post",
                "slug": "xss-post-sanitize",
                "body": '<script>document.cookie="stolen"</script><p>Content</p>',
                "published": True,
            },
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 201
        assert "<script" not in resp.json()["body"]
    async def test_xss_in_update_also_sanitized(self, client: AsyncClient, admin_token: str):
        """ASVS 5.2.1 — XSS payload submitted via PUT update is also sanitized."""
        # Seed a clean page first, then attempt to inject via the update path.
        await client.post(
            "/api/v1/pages",
            json={"title": "Initial", "slug": "xss-update", "body": "<p>Safe</p>"},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        resp = await client.put(
            "/api/v1/pages/xss-update",
            json={"body": '<script>evil()</script><p>updated</p>'},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code == 200
        assert "<script" not in resp.json()["body"]
# ── V5.3 SQL injection prevention ───────────────────────────────────────────
class TestSQLInjection:
    """ASVS V5.3 — Parameterized queries prevent SQL injection at every input boundary."""
    @pytest.mark.parametrize("injection", [
        "' OR '1'='1",
        "1; DROP TABLE pages; --",
        "' UNION SELECT email,hashed_password,1,1,1,1,1 FROM users --",
        "admin'--",
        "'; INSERT INTO users(email) VALUES('pwned@evil.com'); --",
    ])
    async def test_sql_injection_in_slug_does_not_500(
        self, client: AsyncClient, injection: str
    ):
        """ASVS 5.3.4 — SQL injection strings in slug path parameters return 404, not 500.
        SQLAlchemy passes slug as a bind parameter; the string is never
        interpolated into a query. A 404 means the slug simply wasn't found.
        """
        # NOTE(review): the docstring promises 404, but only "not 500" is
        # asserted. Tightening to == 404 would require confirming the router
        # never answers 422 for these strings — TODO confirm before changing.
        resp = await client.get(f"/api/v1/pages/{injection}")
        assert resp.status_code != 500, (
            f"500 for SQL injection slug {injection!r} — possible unparameterised query"
        )
    @pytest.mark.parametrize("email", [
        "admin@example.com' OR '1'='1' --",
        "' OR 1=1; --",
        "admin@example.com'/*",
        "'; DROP TABLE users; --",
    ])
    async def test_sql_injection_in_login_email_does_not_bypass_auth(
        self, client: AsyncClient, email: str
    ):
        """ASVS 5.3.4 | API2 — SQL injection in the login email field returns 401, not 200.
        A vulnerable query like "WHERE email = '{email}'" would return all rows
        with a crafted OR clause, bypassing authentication.
        """
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": email, "password": "password"},
        )
        assert resp.status_code == 401, (
            f"Expected 401 for injected email {email!r}, got {resp.status_code}"
        )
        # Unreachable after the == 401 assertion; retained as an explicit
        # statement that a crash would also be a failure.
        assert resp.status_code != 500
# ── Analytics metadata sanitization ──────────────────────────────────────────
class TestAnalyticsMetadataSanitization:
    """ASVS V5.1 | API8 — Analytics event metadata is whitelist-sanitized server-side.
    Only 9 pre-approved keys are persisted. Values are capped at 120 chars.
    Nested objects and unknown keys are silently dropped.
    """
    async def _post_event(self, client: AsyncClient, metadata: dict) -> int:
        # Shared helper: submit a page_view event carrying the given metadata
        # and return only the HTTP status code. These tests assert acceptance;
        # what was actually persisted is presumably verified elsewhere.
        resp = await client.post(
            "/api/web/event",
            json={
                "event_type": "page_view",
                "page": "/",
                "session_id": "meta-test",
                "metadata": metadata,
            },
        )
        return resp.status_code
    async def test_unknown_metadata_key_is_dropped(self, client: AsyncClient):
        """ASVS 5.1.1 | API8 — Keys outside the allowlist are silently removed."""
        status = await self._post_event(
            client, {"evil_key": "bad value", "plan": "dog-walks"}
        )
        assert status == 201
    async def test_nested_object_in_metadata_dropped(self, client: AsyncClient):
        """ASVS 5.1.1 | API8 — Nested dict values are dropped (no recursive storage)."""
        status = await self._post_event(
            client, {"plan": {"deeply": {"nested": "object"}}}
        )
        assert status == 201
    async def test_prototype_pollution_keys_dropped(self, client: AsyncClient):
        """ASVS 5.1.1 | API8 — __proto__ and constructor keys are rejected by the allowlist."""
        # These keys target JS consumers of the stored metadata, not Python;
        # the allowlist should drop them regardless.
        status = await self._post_event(
            client,
            {
                "__proto__": {"isAdmin": True},
                "constructor": {"name": "attack"},
                "plan": "safe-value",
            },
        )
        assert status == 201
    async def test_oversized_string_value_is_accepted(self, client: AsyncClient):
        """ASVS 5.1.3 — Metadata string values longer than 120 chars are truncated, not errored."""
        # Only the 201 is asserted here; truncation itself is not observable
        # from this response.
        status = await self._post_event(client, {"plan": "x" * 500})
        assert status == 201
    async def test_null_metadata_accepted(self, client: AsyncClient):
        """ASVS 5.1.1 — Null metadata field is valid and accepted."""
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/", "session_id": "null-meta", "metadata": None},
        )
        assert resp.status_code == 201
    async def test_large_metadata_object_does_not_crash(self, client: AsyncClient):
        """ASVS 5.1.3 | API4 — Metadata with many keys (mostly unknown) is handled safely."""
        big_meta = {f"key_{i}": f"value_{i}" for i in range(200)}
        status = await self._post_event(client, big_meta)
        assert status != 500
+251
View File
@@ -0,0 +1,251 @@
"""
Rate limiting, resource consumption, and SSRF mitigation tests.
Control coverage
OWASP ASVS v4.0 V13 API and Web Service Verification
OWASP API Top 10 API4:2023 Unrestricted Resource Consumption
API6:2023 Unrestricted Access to Sensitive Business Flows
API7:2023 Server Side Request Forgery (SSRF)
"""
import pytest
from httpx import AsyncClient
pytestmark = pytest.mark.asyncio
# ── V13.2 / API4 Rate limit presence ────────────────────────────────────────
class TestRateLimitHeaders:
    """ASVS V13.2 | API4 — Sensitive endpoints advertise rate-limit headers.
    slowapi can emit X-RateLimit-* headers when headers_enabled=True is passed
    to the Limiter constructor in app/middleware/rate_limit.py.
    """
    async def test_login_endpoint_exposes_rate_limit_headers(
        self, client: AsyncClient, admin_user
    ):
        """ASVS 13.2.1 | API4 — /auth/login returns X-RateLimit-* response headers.
        Advertising limits allows legitimate clients to back off gracefully.
        The configured limit is 5 requests/minute.
        """
        resp = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "testpassword"},
        )
        assert resp.status_code == 200
        # Normalize header names to lowercase so the check is case-insensitive.
        headers_lower = {k.lower(): v for k, v in resp.headers.items()}
        assert "x-ratelimit-limit" in headers_lower
        assert "x-ratelimit-remaining" in headers_lower
    async def test_analytics_ingest_exposes_rate_limit_headers(
        self, client: AsyncClient
    ):
        """ASVS 13.2.1 | API4 — Analytics ingest endpoint returns X-RateLimit-* headers.
        The analytics endpoint is public and rate-limited to 60 requests/minute.
        """
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/", "session_id": "rl-test"},
        )
        assert resp.status_code == 201
        headers_lower = {k.lower(): v for k, v in resp.headers.items()}
        assert "x-ratelimit-limit" in headers_lower
    async def test_refresh_endpoint_exposes_rate_limit_headers(
        self, client: AsyncClient, admin_user
    ):
        """ASVS 13.2.1 | API4 — /auth/refresh returns rate-limit headers (5/minute)."""
        # A real login is needed first to obtain a refresh token to exchange.
        login = await client.post(
            "/api/v1/auth/login",
            json={"email": "admin@example.com", "password": "testpassword"},
        )
        refresh_token = login.json()["refresh_token"]
        resp = await client.post(
            "/api/v1/auth/refresh",
            json={"refresh_token": refresh_token},
        )
        assert resp.status_code == 200
        headers_lower = {k.lower(): v for k, v in resp.headers.items()}
        assert "x-ratelimit-limit" in headers_lower
# ── API4 Payload size limits ────────────────────────────────────────────────
class TestPayloadSizeLimits:
    """API4:2023 — Oversized payloads are rejected without crashing the server."""
    async def test_oversized_event_page_path_rejected(self, client: AsyncClient):
        """API4:2023 — Analytics page field exceeding max_length returns 422."""
        # 10 000 chars is far past the 255-char limit asserted elsewhere.
        resp = await client.post(
            "/api/web/event",
            json={
                "event_type": "page_view",
                "page": "/" + "x" * 10_000,
                "session_id": "size-test",
            },
        )
        assert resp.status_code == 422
        assert resp.status_code != 500
    async def test_large_page_body_does_not_500(
        self, client: AsyncClient, admin_token: str
    ):
        """API4:2023 — A 100 KB page body does not crash the server.
        FastAPI / Starlette has a default body size limit. Large payloads should
        either be accepted (nh3 can handle them) or rejected with 413/422.
        A 500 would indicate unhandled processing failure.
        """
        large_body = "<p>" + "A" * 100_000 + "</p>"
        resp = await client.post(
            "/api/v1/pages",
            json={
                "title": "Big Page",
                "slug": "big-page-payload",
                "body": large_body,
                "published": False,
            },
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code != 500
    async def test_deeply_nested_json_does_not_500(
        self, client: AsyncClient, admin_token: str
    ):
        """API4:2023 — Highly nested JSON body (potential stack-overflow vector) is handled.

        The server must survive parsing a 50-level-deep JSON object: either
        the schema rejects the non-string body (422) or it is otherwise
        handled — anything but a 500.
        """
        # Build a deeply nested dict: {"a": {"a": {"a": ... }}}
        nested: dict = {}
        node = nested
        for _ in range(50):
            node["a"] = {}
            node = node["a"]
        # Fix: send the nested structure itself so the JSON parser actually
        # processes 50 levels of nesting. The previous str(nested) flattened
        # it into a plain string and never exercised the parser.
        resp = await client.post(
            "/api/v1/pages",
            json={"title": "Nested", "slug": "nested-json", "body": nested},
            headers={"Authorization": f"Bearer {admin_token}"},
        )
        assert resp.status_code != 500
# ── API7 SSRF — private IP suppression in geo-lookup ────────────────────────
class TestSSRFMitigation:
    """API7:2023 SSRF — The analytics geo-lookup must not forward private IPs externally.
    _geo_lookup() in app/routers/analytics.py checks for private IP prefixes
    and returns (None, None) immediately, preventing the server from making
    outbound requests to ip-api.com with internal addresses.
    """
    # Covers loopback, all three RFC 1918 ranges (boundary values included),
    # IPv6 loopback, and the literal hostname "localhost".
    @pytest.mark.parametrize("private_ip", [
        "127.0.0.1",
        "10.0.0.1",
        "10.255.255.255",
        "192.168.1.100",
        "172.16.0.1",
        "172.31.255.255",
        "::1",
        "localhost",
    ])
    async def test_private_ip_in_xff_does_not_cause_error(
        self, client: AsyncClient, private_ip: str
    ):
        """API7:2023 — Private/loopback IP in X-Forwarded-For is handled safely.
        The event is still recorded (201); geo fields will be null. The server
        must not error or make an outbound call for private addresses.
        """
        # NOTE(review): only the status code is observable here; absence of an
        # outbound call is a property of _geo_lookup, not asserted directly.
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/", "session_id": "ssrf-test"},
            headers={"X-Forwarded-For": private_ip},
        )
        assert resp.status_code == 201, (
            f"Expected 201 for private IP {private_ip!r}, got {resp.status_code}"
        )
    @pytest.mark.parametrize("xff", [
        "not-an-ip",
        "999.999.999.999",
        ",,,",
        "127.0.0.1, 10.0.0.1, attacker.example.com",
        "",
    ])
    async def test_malformed_xff_does_not_cause_500(
        self, client: AsyncClient, xff: str
    ):
        """API7:2023 — Malformed X-Forwarded-For header is handled without crashing."""
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/", "session_id": "xff-malform"},
            headers={"X-Forwarded-For": xff},
        )
        assert resp.status_code != 500, (
            f"500 for X-Forwarded-For: {xff!r}"
        )
# ── API6 Sensitive business-flow controls ────────────────────────────────────
class TestBusinessFlowProtection:
    """API6:2023 — Sensitive or high-volume flows have appropriate access controls."""
    async def test_analytics_ingest_is_intentionally_public(self, client: AsyncClient):
        """API6:2023 — Anonymous event ingestion is by design; rate limiting is the control.
        This test documents the intentional decision: any browser can POST to
        /api/web/event without credentials. The 60 req/min rate limit and
        metadata whitelist are the primary abuse-prevention controls.
        """
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/about", "session_id": "anon"},
        )
        assert resp.status_code == 201
    async def test_analytics_read_requires_authentication(self, client: AsyncClient):
        """API6:2023 | API5 — Aggregated analytics data (business intelligence) is auth-gated.
        Public write / authenticated read is the intended access pattern.
        """
        resp = await client.get("/api/v1/analytics/summary")
        assert resp.status_code in (401, 403)
    async def test_session_cookie_is_httponly(self, client: AsyncClient):
        """ASVS 3.4.2 | API6 — The anonymous session cookie is HttpOnly.
        HttpOnly prevents JavaScript from reading the cookie, mitigating
        session hijacking via XSS.
        """
        # session_id=None prompts the server to mint a session of its own,
        # which is when the Set-Cookie header is expected.
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/", "session_id": None},
        )
        assert resp.status_code == 201
        set_cookie = resp.headers.get("set-cookie", "")
        # NOTE(review): if no cookie is set, the assertion is skipped and the
        # test passes vacuously — consider asserting the cookie exists.
        if set_cookie:
            assert "httponly" in set_cookie.lower(), (
                "Session cookie must be HttpOnly"
            )
    async def test_session_cookie_is_samesite_lax(self, client: AsyncClient):
        """ASVS 3.4.3 | API6 — The anonymous session cookie has SameSite=Lax.
        SameSite=Lax blocks the cookie from being sent in cross-site POST
        requests, protecting against CSRF on cookie-authenticated flows.
        """
        resp = await client.post(
            "/api/web/event",
            json={"event_type": "page_view", "page": "/", "session_id": None},
        )
        assert resp.status_code == 201
        set_cookie = resp.headers.get("set-cookie", "")
        # NOTE(review): same vacuous-pass caveat as the HttpOnly test above.
        if set_cookie:
            assert "samesite=lax" in set_cookie.lower(), (
                "Session cookie must be SameSite=Lax"
            )
@@ -0,0 +1,773 @@
"""
Extended tests for admin-facing members and booking endpoints.
Covers endpoints not exercised in test_members.py:
- GET /settings/features fetch global member feature flags
- PUT /settings/features update global member feature flags
- GET /admin/members/{member_id} fetch a single member record
- GET /admin/members/{member_id}/walks admin view of a member's walks
- GET /admin/members/{member_id}/bookings admin view of a member's bookings
- GET /admin/bookings list all bookings across members
- POST /admin/bookings create a booking on behalf of a member
- PUT /admin/bookings/{booking_id} update booking status / notes
- GET /admin/messages message history with read status
- GET /admin/notifications actionable admin notification feed
- GET /admin/notifications/settings fetch notification config
- PUT /admin/notifications/settings update notification config
- POST /admin/notifications/run manually trigger notification run
"""
import uuid
from datetime import datetime, timezone

import pytest
from httpx import AsyncClient
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.audit import AuditLog
from app.models.contact_lead import ContactLead
from app.models.member import Member, Walk, Booking, AdminMessage
from app.models.settings import SiteSettings
from app.auth.password import hash_password
pytestmark = [pytest.mark.asyncio, pytest.mark.members_admin]
# ── Helpers ────────────────────────────────────────────────────────────────────
async def _member(
    db: AsyncSession,
    email: str = "m@example.com",
    claimed: bool = True,
    status: str = "active",
) -> Member:
    """Insert and return a Member row; claimed members also get a password hash."""
    fields = {
        "email": email,
        "first_name": "Jane",
        "last_name": "Doe",
        "phone": "021 000 0000",
        "is_claimed": claimed,
        "is_active": True,
        "member_status": status,
        "hashed_password": hash_password("Password1!") if claimed else None,
        "onboarding_data": {"dog_name": "Rex"},
    }
    record = Member(**fields)
    db.add(record)
    await db.commit()
    await db.refresh(record)
    return record
async def _walk(db: AsyncSession, member_id) -> Walk:
    """Insert and return one recorded Walk belonging to *member_id*."""
    row = Walk(
        member_id=member_id,
        walked_at=datetime(2026, 3, 15, 9, 0, tzinfo=timezone.utc),
        service_type="pack_walk",
        duration_minutes=60,
        notes="Test walk",
        recorded_by="admin@example.com",
    )
    db.add(row)
    await db.commit()
    await db.refresh(row)
    return row
async def _booking(db: AsyncSession, member_id) -> Booking:
    """Insert and return a pending pack_walk Booking for *member_id*."""
    row = Booking(
        member_id=member_id,
        service_type="pack_walk",
        status="pending",
        notes="Morning preferred",
    )
    db.add(row)
    await db.commit()
    await db.refresh(row)
    return row
async def _lead(db: AsyncSession, email: str = "lead@example.com") -> ContactLead:
    """Insert and return a ContactLead in the 'invite' stage."""
    row = ContactLead(
        full_name="Alex Prospect",
        email=email,
        phone="021 222 2222",
        suburb="Devonport",
        pet_name="Milo",
        status="invite",
    )
    db.add(row)
    await db.commit()
    await db.refresh(row)
    return row
# ── GET /admin/members/{member_id} ─────────────────────────────────────────────
async def test_admin_get_member(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """An admin can fetch a single member record by id with core fields intact."""
    member = await _member(db_session, "getme@example.com")
    resp = await client.get(
        f"/api/v1/admin/members/{member.id}",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    # The API serializes ids as strings, hence str(member.id).
    assert data["id"] == str(member.id)
    assert data["email"] == "getme@example.com"
    assert data["first_name"] == "Jane"
async def test_admin_get_member_not_found(client: AsyncClient, admin_token: str):
    """A random UUID matching no member returns 404 (uses module-level `uuid`)."""
    resp = await client.get(
        f"/api/v1/admin/members/{uuid.uuid4()}",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 404
async def test_admin_get_member_requires_auth(client: AsyncClient, db_session: AsyncSession):
    """An unauthenticated request for a member record is rejected."""
    target = await _member(db_session, "noauth@example.com")
    response = await client.get(f"/api/v1/admin/members/{target.id}")
    assert response.status_code in (401, 403)
# ── GET /admin/members/{member_id}/walks ───────────────────────────────────────
async def test_admin_get_member_walks_empty(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """A member with no walks yields an empty list, not 404 or null."""
    member = await _member(db_session, "nowalks@example.com")
    resp = await client.get(
        f"/api/v1/admin/members/{member.id}/walks",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert resp.json() == []
async def test_admin_get_member_walks_with_data(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """All walks recorded for a member appear in the admin walks list."""
    member = await _member(db_session, "haswalks@example.com")
    await _walk(db_session, member.id)
    await _walk(db_session, member.id)
    resp = await client.get(
        f"/api/v1/admin/members/{member.id}/walks",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert len(resp.json()) == 2
async def test_admin_get_member_walks_only_own(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """Walks returned belong only to the requested member, not all members."""
    # m1 has one walk, m2 has two; fetching m1's walks must not leak m2's.
    m1 = await _member(db_session, "walker1@example.com")
    m2 = await _member(db_session, "walker2@example.com")
    await _walk(db_session, m1.id)
    await _walk(db_session, m2.id)
    await _walk(db_session, m2.id)
    resp = await client.get(
        f"/api/v1/admin/members/{m1.id}/walks",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert len(resp.json()) == 1
# ── GET /admin/members/{member_id}/bookings ────────────────────────────────────
async def test_admin_get_member_bookings_empty(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """A member with no bookings yields an empty list."""
    member = await _member(db_session, "nobookings@example.com")
    resp = await client.get(
        f"/api/v1/admin/members/{member.id}/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert resp.json() == []
async def test_admin_get_member_bookings_with_data(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """A member's bookings are listed with their service type and status."""
    member = await _member(db_session, "hasbookings@example.com")
    await _booking(db_session, member.id)
    resp = await client.get(
        f"/api/v1/admin/members/{member.id}/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert len(data) == 1
    # _booking always creates a pending pack_walk; verify it round-trips.
    assert data[0]["service_type"] == "pack_walk"
    assert data[0]["status"] == "pending"
async def test_admin_get_member_bookings_only_own(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """Bookings returned belong only to the requested member."""
    # One booking each for two members; fetching m2's must return exactly one.
    m1 = await _member(db_session, "bk1@example.com")
    m2 = await _member(db_session, "bk2@example.com")
    await _booking(db_session, m1.id)
    await _booking(db_session, m2.id)
    resp = await client.get(
        f"/api/v1/admin/members/{m2.id}/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert len(resp.json()) == 1
# ── GET /admin/bookings ────────────────────────────────────────────────────────
async def test_admin_list_bookings_empty(client: AsyncClient, admin_token: str):
    """With no bookings in the database, the admin list endpoint returns []."""
    auth = {"Authorization": f"Bearer {admin_token}"}
    response = await client.get("/api/v1/admin/bookings", headers=auth)
    assert response.status_code == 200
    assert response.json() == []
async def test_admin_list_bookings_includes_member_details(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """Each booking in the admin list includes the member's name and email."""
    member = await _member(db_session, "listbooking@example.com")
    await _booking(db_session, member.id)
    resp = await client.get(
        "/api/v1/admin/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert len(data) == 1
    # Member details are denormalized onto each booking row for the admin UI.
    assert data[0]["member_email"] == "listbooking@example.com"
    assert data[0]["member_first_name"] == "Jane"
    assert data[0]["member_last_name"] == "Doe"
async def test_admin_list_bookings_multiple_members(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """The admin list aggregates bookings across all members."""
    m1 = await _member(db_session, "mb1@example.com")
    m2 = await _member(db_session, "mb2@example.com")
    await _booking(db_session, m1.id)
    await _booking(db_session, m2.id)
    resp = await client.get(
        "/api/v1/admin/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert len(resp.json()) == 2
async def test_admin_list_bookings_requires_auth(client: AsyncClient):
    """The admin bookings list is not reachable without credentials."""
    response = await client.get("/api/v1/admin/bookings")
    assert response.status_code in (401, 403)
async def test_admin_bookings_feature_can_be_disabled(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """With bookings_enabled=False, the admin list presents as 404 (hidden, not 403)."""
    member = await _member(db_session, "disabledbookings@example.com")
    await _booking(db_session, member.id)
    # Inserting a SiteSettings row with the flag off disables the feature.
    db_session.add(SiteSettings(site_name="", bookings_enabled=False))
    await db_session.commit()
    resp = await client.get(
        "/api/v1/admin/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 404
# ── POST /admin/bookings ──────────────────────────────────────────────────────
async def test_admin_create_booking(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """An admin can create a booking on behalf of a member; the response echoes
    the booking fields plus denormalized member details."""
    member = await _member(db_session, "createbooking@example.com")
    resp = await client.post(
        "/api/v1/admin/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={
            "member_id": str(member.id),
            "service_type": "1_1_walk",
            "requested_date": "2026-04-09T07:00:00Z",
            "status": "confirmed",
            "admin_notes": "Created from mobile planner",
        },
    )
    assert resp.status_code == 201
    data = resp.json()
    assert data["member_id"] == str(member.id)
    assert data["service_type"] == "1_1_walk"
    assert data["status"] == "confirmed"
    assert data["member_email"] == "createbooking@example.com"
    # dog_name comes from the member's onboarding_data set up by _member.
    assert data["member_dog_name"] == "Rex"
async def test_admin_create_booking_requires_auth(client: AsyncClient, db_session: AsyncSession):
    """Booking creation is rejected without an admin bearer token."""
    member = await _member(db_session, "createbooking-noauth@example.com")
    resp = await client.post(
        "/api/v1/admin/bookings",
        json={
            "member_id": str(member.id),
            "service_type": "pack_walk",
        },
    )
    assert resp.status_code in (401, 403)
async def test_admin_create_booking_feature_can_be_disabled(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """With bookings_enabled=False, booking creation presents as 404."""
    member = await _member(db_session, "createbooking-disabled@example.com")
    db_session.add(SiteSettings(site_name="", bookings_enabled=False))
    await db_session.commit()
    resp = await client.post(
        "/api/v1/admin/bookings",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={
            "member_id": str(member.id),
            "service_type": "pack_walk",
        },
    )
    assert resp.status_code == 404
# ── PUT /admin/bookings/{booking_id} ──────────────────────────────────────────
async def test_admin_update_booking_status(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """PUT with a status field transitions the booking (pending → confirmed)."""
    member = await _member(db_session, "updatebooking@example.com")
    booking = await _booking(db_session, member.id)
    resp = await client.put(
        f"/api/v1/admin/bookings/{booking.id}",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"status": "confirmed"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["status"] == "confirmed"
    assert data["id"] == str(booking.id)
async def test_admin_update_booking_admin_notes(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """A partial update carrying only admin_notes is applied and echoed back."""
    member = await _member(db_session, "booknotes@example.com")
    booking = await _booking(db_session, member.id)
    resp = await client.put(
        f"/api/v1/admin/bookings/{booking.id}",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"admin_notes": "Revised to afternoon slot"},
    )
    assert resp.status_code == 200
    assert resp.json()["admin_notes"] == "Revised to afternoon slot"
async def test_admin_update_booking_requested_date(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """requested_date and admin_notes can be changed together in one PUT."""
    member = await _member(db_session, "bookmove@example.com")
    booking = await _booking(db_session, member.id)
    # ISO-8601 UTC timestamp; the API is expected to echo it back verbatim.
    moved_to = "2026-04-09T13:00:00Z"
    resp = await client.put(
        f"/api/v1/admin/bookings/{booking.id}",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"requested_date": moved_to, "admin_notes": "Moved to PM route"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["requested_date"] == moved_to
    assert data["admin_notes"] == "Moved to PM route"
async def test_admin_update_booking_not_found(client: AsyncClient, admin_token: str):
    """Updating a booking id that does not exist yields 404."""
    import uuid

    missing_id = uuid.uuid4()
    response = await client.put(
        f"/api/v1/admin/bookings/{missing_id}",
        json={"status": "confirmed"},
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert response.status_code == 404
async def test_admin_update_booking_includes_member_details(
    client: AsyncClient, admin_token: str, db_session: AsyncSession
):
    """Response includes member name and email even after update."""
    member = await _member(db_session, "updatebk2@example.com")
    booking = await _booking(db_session, member.id)
    resp = await client.put(
        f"/api/v1/admin/bookings/{booking.id}",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"status": "cancelled"},
    )
    assert resp.status_code == 200
    data = resp.json()
    # The update response is expected to stay denormalised with member fields.
    assert data["member_email"] == "updatebk2@example.com"
    assert data["status"] == "cancelled"
async def test_admin_can_archive_member(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """Archiving a member sets member_status to 'archived' and deactivates the account."""
    member = await _member(db_session, "archive-me@example.com")
    resp = await client.post(
        f"/api/v1/admin/members/{member.id}/archive",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert data["member_status"] == "archived"
    assert data["is_active"] is False
async def test_admin_can_deactivate_member(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """Deactivating a member flips is_active to False."""
    target = await _member(db_session, "deactivate-me@example.com")
    response = await client.post(
        f"/api/v1/admin/members/{target.id}/deactivate",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert response.status_code == 200
    body = response.json()
    assert body["is_active"] is False
async def test_admin_can_toggle_member_force_two_factor(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """POST /force-2fa with enabled=True turns on the member's force_two_factor flag."""
    member = await _member(db_session, "force-toggle@example.com")
    resp = await client.post(
        f"/api/v1/admin/members/{member.id}/force-2fa",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"enabled": True},
    )
    assert resp.status_code == 200
    assert resp.json()["force_two_factor"] is True
async def test_admin_can_reset_member_password(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """Resetting a member's password un-claims the account and clears the stored hash."""
    member = await _member(db_session, "reset-password@example.com")
    resp = await client.post(
        f"/api/v1/admin/members/{member.id}/reset-password",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    assert resp.json()["is_claimed"] is False
    # Refresh from the DB to confirm the hash was actually nulled server-side.
    await db_session.refresh(member)
    assert member.hashed_password is None
# ── GET /admin/messages & /admin/notifications ────────────────────────────────
async def test_admin_list_messages_history(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """GET /admin/messages lists sent messages with member email and read timestamp."""
    member = await _member(db_session, "history@example.com")
    # Seed one already-read message so read_at is populated in the listing.
    message = AdminMessage(
        member_id=member.id,
        subject="Walk update",
        body="Tomorrow's slot is confirmed.",
        sent_by="admin@example.com",
        read_at=datetime(2026, 4, 1, 9, 30, tzinfo=timezone.utc),
    )
    db_session.add(message)
    await db_session.commit()
    resp = await client.get(
        "/api/v1/admin/messages",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert len(data) == 1
    assert data[0]["member_email"] == "history@example.com"
    assert data[0]["subject"] == "Walk update"
    assert data[0]["read_at"] is not None
async def test_admin_list_messages_requires_auth(client: AsyncClient):
    """The admin message history endpoint rejects unauthenticated requests."""
    response = await client.get("/api/v1/admin/messages")
    assert response.status_code in (401, 403)
async def test_admin_list_notifications(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """The notification feed aggregates pending bookings, leads, reviews, and session audit events."""
    member = await _member(db_session, "notify@example.com", status="pending_review")
    booking = await _booking(db_session, member.id)
    lead = await _lead(db_session, "notifylead@example.com")
    # Seed login/logout audit rows so member session events appear in the feed.
    db_session.add_all(
        [
            AuditLog(
                member_id=member.id,
                member_email=member.email,
                action_type="login",
                area="members/login",
                description="Member logged in successfully.",
                status="success",
                timestamp=datetime.now(timezone.utc),
            ),
            AuditLog(
                member_id=member.id,
                member_email=member.email,
                action_type="logout",
                area="members/logout",
                description="Member ended their session.",
                status="success",
                timestamp=datetime.now(timezone.utc),
            ),
        ]
    )
    await db_session.commit()
    resp = await client.get(
        "/api/v1/admin/notifications",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert "items" in data
    assert "settings" in data
    assert data["total"] >= 3
    item_types = {item["type"] for item in data["items"]}
    assert "pending_booking" in item_types
    assert "new_lead" in item_types
    assert "pending_review" in item_types
    assert "member_login" in item_types
    assert "member_logout" in item_types
    # Every feed item links to the admin page where it can be actioned.
    hrefs = {item["id"]: item["href"] for item in data["items"]}
    assert hrefs[str(booking.id)] == "/admin/bookings"
    assert hrefs[str(lead.id)] == "/admin/leads"
    assert hrefs[str(member.id)] == f"/admin/members/{member.id}"
    session_hrefs = {item["type"]: item["href"] for item in data["items"] if item["type"] in {"member_login", "member_logout"}}
    assert session_hrefs["member_login"] == f"/admin/members/{member.id}"
    assert session_hrefs["member_logout"] == f"/admin/members/{member.id}"
async def test_admin_list_notifications_requires_auth(client: AsyncClient):
    """The notification feed endpoint rejects unauthenticated requests."""
    response = await client.get("/api/v1/admin/notifications")
    assert response.status_code in (401, 403)
async def test_admin_get_notification_settings(client: AsyncClient, admin_token: str):
    """GET /admin/notifications/settings exposes all toggle flags and the invoice weekday."""
    resp = await client.get(
        "/api/v1/admin/notifications/settings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert "automatic_member_notifications_enabled" in data
    assert "nz_public_holiday_notifications_enabled" in data
    assert "invoice_reminder_notifications_enabled" in data
    assert "invoice_day_of_week" in data
    # The weekday is serialised as an integer, not a name.
    assert isinstance(data["invoice_day_of_week"], int)
async def test_admin_get_notification_settings_requires_auth(client: AsyncClient):
    """Reading notification settings without a token is rejected."""
    response = await client.get("/api/v1/admin/notifications/settings")
    assert response.status_code in (401, 403)
# ── GET /settings/features ────────────────────────────────────────────────────
async def test_get_feature_settings_defaults(client: AsyncClient):
    """With no settings row persisted, every public feature flag defaults to enabled."""
    resp = await client.get("/api/v1/settings/features")
    assert resp.status_code == 200
    assert resp.json() == {
        "bookings_enabled": True,
        "walks_enabled": True,
        "messages_enabled": True,
        "two_factor_enabled": True,
        "audit_history_enabled": True,
        "experiments_enabled": True,
    }
# ── PUT /settings/features ────────────────────────────────────────────────────
async def test_update_feature_settings(client: AsyncClient, admin_token: str):
    """PUT /settings/features persists each flag and echoes the full new state."""
    resp = await client.put(
        "/api/v1/settings/features",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={
            "bookings_enabled": False,
            "walks_enabled": True,
            "messages_enabled": False,
            "two_factor_enabled": False,
            "audit_history_enabled": False,
            "experiments_enabled": False,
        },
    )
    assert resp.status_code == 200
    # The response mirrors exactly what was submitted.
    assert resp.json() == {
        "bookings_enabled": False,
        "walks_enabled": True,
        "messages_enabled": False,
        "two_factor_enabled": False,
        "audit_history_enabled": False,
        "experiments_enabled": False,
    }
async def test_update_feature_settings_requires_auth(client: AsyncClient):
    """Feature flags cannot be changed without an admin token."""
    payload = {"bookings_enabled": False}
    response = await client.put("/api/v1/settings/features", json=payload)
    assert response.status_code in (401, 403)
async def test_get_service_pricing_defaults(client: AsyncClient):
    """The public pricing endpoint returns the default per-service amounts."""
    resp = await client.get("/api/v1/settings/pricing")
    assert resp.status_code == 200
    data = resp.json()["service_pricing"]
    assert data["pack_walk"]["amount"] == 58.0
    assert data["1_1_walk"]["amount"] == 45.0
    assert data["puppy_visit"]["amount"] == 39.0
async def test_update_service_pricing(client: AsyncClient, admin_token: str):
    """PUT /settings/pricing stores new amounts; integers come back as floats."""
    resp = await client.put(
        "/api/v1/settings/pricing",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={
            "service_pricing": {
                "pack_walk": {"amount": 61, "label": "Pack Walk", "unit": "per walk"},
                "1_1_walk": {"amount": 52, "label": "1-1 Walk", "unit": "per walk"},
                "puppy_visit": {"amount": 44, "label": "Puppy Visit", "unit": "per visit"},
            }
        },
    )
    assert resp.status_code == 200
    data = resp.json()["service_pricing"]
    assert data["pack_walk"]["amount"] == 61.0
    assert data["1_1_walk"]["amount"] == 52.0
    assert data["puppy_visit"]["amount"] == 44.0
async def test_update_service_pricing_requires_auth(client: AsyncClient):
    """Pricing cannot be changed without an admin token."""
    payload = {"service_pricing": {"pack_walk": {"amount": 62}}}
    response = await client.put("/api/v1/settings/pricing", json=payload)
    assert response.status_code in (401, 403)
async def test_admin_audit_history_returns_404_when_disabled(
    client: AsyncClient,
    admin_token: str,
    db_session: AsyncSession,
):
    """The audit endpoint disappears (404) when audit_history_enabled is off."""
    db_session.add(SiteSettings(site_name="", audit_history_enabled=False))
    await db_session.commit()
    resp = await client.get(
        "/api/v1/admin/audit",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 404
# ── PUT /admin/notifications/settings ─────────────────────────────────────────
async def test_admin_update_notification_settings_toggle(client: AsyncClient, admin_token: str):
    """Toggling automatic_member_notifications_enabled round-trips through GET then PUT."""
    # Read current state
    get_resp = await client.get(
        "/api/v1/admin/notifications/settings",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    current = get_resp.json()["automatic_member_notifications_enabled"]
    # Flip the flag
    put_resp = await client.put(
        "/api/v1/admin/notifications/settings",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"automatic_member_notifications_enabled": not current},
    )
    assert put_resp.status_code == 200
    assert put_resp.json()["automatic_member_notifications_enabled"] is not current
async def test_admin_update_invoice_day_of_week(client: AsyncClient, admin_token: str):
    """Setting invoice_day_of_week to 4 (Friday) is accepted and echoed back."""
    friday = 4
    response = await client.put(
        "/api/v1/admin/notifications/settings",
        json={"invoice_day_of_week": friday},
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert response.status_code == 200
    assert response.json()["invoice_day_of_week"] == friday
async def test_admin_update_invoice_day_invalid(client: AsyncClient, admin_token: str):
    """Day of week must be 0-6; out-of-range values are rejected with 422."""
    resp = await client.put(
        "/api/v1/admin/notifications/settings",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"invoice_day_of_week": 7},
    )
    assert resp.status_code == 422
# ── POST /admin/notifications/run ─────────────────────────────────────────────
async def test_admin_run_notifications(client: AsyncClient, admin_token: str):
    """POST /admin/notifications/run reports integer counters for each notification type."""
    resp = await client.post(
        "/api/v1/admin/notifications/run",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert resp.status_code == 200
    data = resp.json()
    assert "automatic_member_notifications_enabled" in data
    assert "public_holiday_messages_sent" in data
    assert "invoice_reminders_sent" in data
    assert isinstance(data["public_holiday_messages_sent"], int)
    assert isinstance(data["invoice_reminders_sent"], int)
async def test_admin_run_notifications_requires_auth(client: AsyncClient):
    """The manual notification-run endpoint rejects unauthenticated requests."""
    response = await client.post("/api/v1/admin/notifications/run")
    assert response.status_code in (401, 403)
async def test_admin_can_clear_notifications(client: AsyncClient, admin_token: str, db_session: AsyncSession):
    """Clearing the feed empties it, persists a cleared-before marker, and survives a re-fetch."""
    member = await _member(db_session, "clearable@example.com", status="pending_review")
    await _booking(db_session, member.id)
    # Seed one audit event so the feed has content to clear.
    db_session.add(
        AuditLog(
            member_id=member.id,
            member_email=member.email,
            action_type="login",
            area="members/login",
            description="Member logged in successfully.",
            status="success",
            timestamp=datetime.now(timezone.utc),
        )
    )
    await db_session.commit()
    clear_resp = await client.post(
        "/api/v1/admin/notifications/clear",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert clear_resp.status_code == 200
    cleared = clear_resp.json()
    assert cleared["items"] == []
    assert cleared["total"] == 0
    # The clear is recorded as a timestamp watermark on the settings row.
    settings_row = (await db_session.execute(select(SiteSettings))).scalars().first()
    assert settings_row is not None
    assert settings_row.admin_notifications_cleared_before is not None
    # A subsequent GET must also come back empty, not just the clear response.
    feed_resp = await client.get(
        "/api/v1/admin/notifications",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert feed_resp.status_code == 200
    assert feed_resp.json()["items"] == []
+76
View File
@@ -0,0 +1,76 @@
from datetime import datetime, timedelta, timezone
import pytest
from app.models.analytics import AnalyticsEvent
from tests.conftest import TestSessionLocal
@pytest.mark.asyncio
async def test_analytics_summary_includes_top_journeys(client, admin_token):
    """The summary ranks adjacent page transitions per session and drops self-transitions."""
    now = datetime.now(timezone.utc)
    # Two sessions: both transition / -> /pack-walks; session-2 also repeats
    # /pack-walks, which must NOT surface as a /pack-walks -> /pack-walks journey.
    async with TestSessionLocal() as session:
        session.add_all([
            AnalyticsEvent(
                event_type="page_view",
                page="/",
                element=None,
                metadata_={},
                session_id="session-1",
                created_at=now - timedelta(minutes=5),
            ),
            AnalyticsEvent(
                event_type="page_view",
                page="/pack-walks",
                element=None,
                metadata_={},
                session_id="session-1",
                created_at=now - timedelta(minutes=4),
            ),
            AnalyticsEvent(
                event_type="page_view",
                page="/contact",
                element=None,
                metadata_={},
                session_id="session-1",
                created_at=now - timedelta(minutes=3),
            ),
            AnalyticsEvent(
                event_type="page_view",
                page="/",
                element=None,
                metadata_={},
                session_id="session-2",
                created_at=now - timedelta(minutes=2),
            ),
            AnalyticsEvent(
                event_type="page_view",
                page="/pack-walks",
                element=None,
                metadata_={},
                session_id="session-2",
                created_at=now - timedelta(minutes=1),
            ),
            AnalyticsEvent(
                event_type="page_view",
                page="/pack-walks",
                element=None,
                metadata_={},
                session_id="session-2",
                created_at=now,
            ),
        ])
        await session.commit()
    response = await client.get(
        "/api/v1/analytics/summary",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert response.status_code == 200, response.text
    body = response.json()
    assert "top_journeys" in body
    assert body["top_journeys"][0] == {"label": "/ -> /pack-walks", "count": 2}
    assert all(item["label"] != "/pack-walks -> /pack-walks" for item in body["top_journeys"])
+41
View File
@@ -0,0 +1,41 @@
import pytest
from sqlalchemy import select
from app.models.analytics import AnalyticsEvent
from tests.conftest import TestSessionLocal
@pytest.mark.asyncio
async def test_ingest_event_sets_anon_cookie_and_derives_session_server_side(client):
    """Ingest sets an HttpOnly anon cookie, derives the session id, and allow-lists metadata.

    Client-supplied metadata keys outside the allow-list ("screen", nested
    objects) are stripped, while the Referer header is injected as "referrer".
    """
    response = await client.post(
        "/api/web/event",
        json={
            "event_type": "cta_click",
            "page": "/pack-walks",
            "element": "Book Now [pricing-card]",
            "metadata": {
                "variant": "pricing-card-weekly",
                "destination": "/contact",
                "screen": "1920x1080",
                "nested": {"drop": True},
            },
        },
        headers={"referer": "https://goodwalk.example/pack-walks"},
    )
    assert response.status_code == 201, response.text
    assert "__gw_anon=" in response.headers.get("set-cookie", "")
    assert "HttpOnly" in response.headers.get("set-cookie", "")
    # Inspect the persisted row directly to verify the server-side transforms.
    async with TestSessionLocal() as session:
        result = await session.execute(
            select(AnalyticsEvent).order_by(AnalyticsEvent.created_at.desc()).limit(1)
        )
        event = result.scalar_one()
    assert event.session_id
    assert event.metadata_["variant"] == "pricing-card-weekly"
    assert event.metadata_["destination"] == "/contact"
    assert event.metadata_["referrer"] == "https://goodwalk.example/pack-walks"
    assert "screen" not in event.metadata_
    assert "nested" not in event.metadata_
+81
View File
@@ -0,0 +1,81 @@
"""
Tests for the /api/v1/auth/* endpoints.
"""
import pytest
from httpx import AsyncClient
pytestmark = pytest.mark.asyncio
async def test_login_valid_credentials(client: AsyncClient, admin_user):
    """Login with correct credentials returns 200 and both tokens."""
    response = await client.post(
        "/api/v1/auth/login",
        json={"email": "admin@example.com", "password": "testpassword"},
    )
    assert response.status_code == 200
    body = response.json()
    assert body["token_type"] == "bearer"
    # Both tokens must be present and non-trivially sized.
    for token_key in ("access_token", "refresh_token"):
        assert token_key in body
        assert len(body[token_key]) > 10
async def test_login_invalid_password(client: AsyncClient, admin_user):
    """Login with wrong password returns 401."""
    bad_credentials = {"email": "admin@example.com", "password": "wrongpassword"}
    response = await client.post("/api/v1/auth/login", json=bad_credentials)
    assert response.status_code == 401
    assert "Invalid" in response.json()["detail"]
async def test_login_unknown_email(client: AsyncClient):
    """Login with unknown email returns 401."""
    unknown = {"email": "nobody@example.com", "password": "whatever"}
    response = await client.post("/api/v1/auth/login", json=unknown)
    assert response.status_code == 401
async def test_refresh_token_flow(client: AsyncClient, admin_user):
    """Valid refresh token returns a new token pair; old token is revoked."""
    # Login to get initial tokens
    login_resp = await client.post(
        "/api/v1/auth/login",
        json={"email": "admin@example.com", "password": "testpassword"},
    )
    assert login_resp.status_code == 200
    tokens = login_resp.json()
    original_refresh = tokens["refresh_token"]
    # Use the refresh token to get a new pair
    refresh_resp = await client.post(
        "/api/v1/auth/refresh",
        json={"refresh_token": original_refresh},
    )
    assert refresh_resp.status_code == 200
    new_tokens = refresh_resp.json()
    assert "access_token" in new_tokens
    assert "refresh_token" in new_tokens
    # New refresh token should be different (rotation, not reuse)
    assert new_tokens["refresh_token"] != original_refresh
    # Using the old refresh token should now fail (revoked) — this is the
    # replay-protection guarantee of the rotation scheme.
    reuse_resp = await client.post(
        "/api/v1/auth/refresh",
        json={"refresh_token": original_refresh},
    )
    assert reuse_resp.status_code == 401
async def test_refresh_invalid_token(client: AsyncClient):
    """Passing a made-up refresh token returns 401."""
    bogus = {"refresh_token": "not-a-real-token"}
    response = await client.post("/api/v1/auth/refresh", json=bogus)
    assert response.status_code == 401
+147
View File
@@ -0,0 +1,147 @@
import hashlib
import pytest
from httpx import AsyncClient
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.contact_lead import ContactLead
from app.models.member import MemberVerificationCode
pytestmark = pytest.mark.asyncio
async def test_public_contact_submission_creates_lead(client: AsyncClient, db_session: AsyncSession):
    """A public contact form POST persists a ContactLead with flattened service names."""
    response = await client.post(
        "/api/contact",
        json={
            "name": "Jamie Smith",
            "email": "jamie@example.com",
            "phone": "021 333 4444",
            "services": ["Pack Walks", "Puppy Visits"],
            "petName": "Buddy",
            "petBreed": "Shih Tzu",
            "location": "Ponsonby",
            "serviceAreaStatus": "in_area",
            "message": "Interested in a meet and greet.",
        },
    )
    assert response.status_code == 201
    body = response.json()
    assert body["status"] == "invite"
    assert body["email"] == "jamie@example.com"
    result = await db_session.execute(select(ContactLead).where(ContactLead.email == "jamie@example.com"))
    lead = result.scalars().first()
    assert lead is not None
    # The services list is stored as a single comma-joined string column.
    assert lead.requested_services == "Pack Walks, Puppy Visits"
    assert lead.pet_name == "Buddy"
async def test_admin_can_invite_lead_into_onboarding(client: AsyncClient, admin_token: str):
    """Inviting a lead flips its status to 'invited' and creates a matching member record."""
    create_response = await client.post(
        "/api/contact",
        json={
            "name": "Jamie Smith",
            "email": "jamie@example.com",
            "phone": "021 333 4444",
            "services": ["Pack Walks"],
            "petName": "Buddy",
            "petBreed": "Shih Tzu",
            "location": "Ponsonby",
            "message": "Interested in a meet and greet.",
        },
    )
    lead_id = create_response.json()["id"]
    # send_email=False keeps the test offline while exercising the invite path.
    invite_response = await client.post(
        f"/api/v1/admin/leads/{lead_id}/invite",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"send_email": False},
    )
    assert invite_response.status_code == 200
    body = invite_response.json()
    assert body["lead"]["status"] == "invited"
    assert body["member_status"] == "invited"
    assert body["member_id"]
    members_response = await client.get(
        "/api/v1/admin/members",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert any(member["email"] == "jamie@example.com" for member in members_response.json())
async def test_lead_becomes_converted_after_member_activation(
    client: AsyncClient,
    admin_token: str,
    db_session: AsyncSession,
):
    """End-to-end: lead -> invite -> claim -> 2FA login -> onboarding -> activation -> 'converted'.

    Verification codes are emailed in production; here the test overwrites the
    stored code hashes directly so known codes can be submitted.
    """
    create_response = await client.post(
        "/api/contact",
        json={
            "name": "Jamie Smith",
            "email": "jamie@example.com",
            "services": ["Pack Walks"],
            "petName": "Buddy",
        },
    )
    lead_id = create_response.json()["id"]
    invite_response = await client.post(
        f"/api/v1/admin/leads/{lead_id}/invite",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={"send_email": False},
    )
    member_id = invite_response.json()["member_id"]
    # Claim the account: request a code, then forge its hash to a known value.
    await client.post("/api/v1/members/claim/request", json={"email": "jamie@example.com"})
    await db_session.execute(
        MemberVerificationCode.__table__.update()
        .values(code_hash=hashlib.sha256("AABBCC".encode()).hexdigest())
    )
    await db_session.commit()
    await client.post(
        "/api/v1/members/claim/complete",
        json={"email": "jamie@example.com", "code": "AABBCC", "password": "NewPass99!"},
    )
    # Log in, then forge the 2FA code hash the same way to finish verification.
    await client.post(
        "/api/v1/members/auth/login",
        json={"email": "jamie@example.com", "password": "NewPass99!"},
    )
    await db_session.execute(
        MemberVerificationCode.__table__.update()
        .where(MemberVerificationCode.purpose == "login_2fa")
        .values(code_hash=hashlib.sha256("112233".encode()).hexdigest())
    )
    await db_session.commit()
    verify_response = await client.post(
        "/api/v1/members/auth/login/verify",
        json={"email": "jamie@example.com", "code": "112233"},
    )
    access_token = verify_response.json()["access_token"]
    # Complete onboarding and sign the contract as the member.
    await client.put(
        "/api/v1/members/onboarding",
        headers={"Authorization": f"Bearer {access_token}"},
        json={"onboarding_data": {"dog_name": "Buddy"}, "complete_onboarding": True},
    )
    await client.post(
        "/api/v1/members/onboarding/contract",
        headers={"Authorization": f"Bearer {access_token}"},
        json={"signer_name": "Jamie Smith", "agreed": True},
    )
    # Admin activation is the final step that should convert the lead.
    activate_response = await client.post(
        f"/api/v1/admin/members/{member_id}/activate",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert activate_response.status_code == 200
    leads_response = await client.get(
        "/api/v1/admin/leads",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    matching_lead = next(lead for lead in leads_response.json() if lead["id"] == lead_id)
    assert matching_lead["status"] == "converted"
+252
View File
@@ -0,0 +1,252 @@
from datetime import datetime, timedelta, timezone
from decimal import Decimal
import pytest
from sqlalchemy import select
from app.models.experiment import ExperimentEvent
from app.models.settings import SiteSettings
from tests.conftest import TestSessionLocal
@pytest.mark.asyncio
async def test_list_experiments_returns_seeded_registry(client):
    """The public registry lists the two seeded experiments in a fixed order."""
    response = await client.get("/api/experiments")
    assert response.status_code == 200, response.text
    body = response.json()
    assert [item["experiment_key"] for item in body] == [
        "homepage_hero_test",
        "pricing_cta_test",
    ]
    assert body[0]["cookie_name"] == "exp_homepage_hero"
    assert body[0]["variants"][0]["variant_key"] == "control"
@pytest.mark.asyncio
async def test_ingest_experiment_event_persists_event(client):
    """A non-bot experiment event is accepted (202) and persisted with its metadata."""
    now = datetime.now(timezone.utc).isoformat()
    response = await client.post(
        "/api/experiments/event",
        json={
            "experiment_key": "homepage_hero_test",
            "variant_key": "control",
            "session_id": "session_abcd1234",
            "path": "/",
            "event_name": "cta_click",
            "timestamp": now,
            "metadata": {
                "element": "Explore Pack Walks",
                "slot": "primary",
            },
        },
        headers={"user-agent": "Mozilla/5.0"},
    )
    assert response.status_code == 202, response.text
    assert response.json() == {"ok": True, "accepted": True}
    # Verify the row landed in the DB, not just that the API said yes.
    async with TestSessionLocal() as session:
        result = await session.execute(select(ExperimentEvent))
        event = result.scalar_one()
    assert event.experiment_key == "homepage_hero_test"
    assert event.variant_key == "control"
    assert event.event_type == "cta_click"
    assert event.metadata_["element"] == "Explore Pack Walks"
@pytest.mark.asyncio
async def test_ingest_experiment_event_filters_bots(client):
    """Bot user agents get a 202 with accepted=False and no row is written."""
    response = await client.post(
        "/api/experiments/impression",
        json={
            "experiment_key": "homepage_hero_test",
            "variant_key": "control",
            "session_id": "session_abcd1234",
            "path": "/",
            "event_name": "impression",
            "timestamp": datetime.now(timezone.utc).isoformat(),
        },
        headers={"user-agent": "Googlebot/2.1"},
    )
    # Still 202: bots are silently dropped rather than rejected with an error.
    assert response.status_code == 202, response.text
    assert response.json() == {"ok": True, "accepted": False}
    async with TestSessionLocal() as session:
        result = await session.execute(select(ExperimentEvent))
        assert result.scalars().all() == []
@pytest.mark.asyncio
async def test_experiment_results_aggregate_by_variant(client, admin_token):
    """Results roll up per-variant counts, unique sessions, and conversion totals."""
    now = datetime.now(timezone.utc)
    # control: one full funnel (impression -> cta_click -> conversion) in one
    # session; meet_greet_emphasis: two impressions in two distinct sessions.
    # created_at is stored naive, hence the .replace(tzinfo=None).
    async with TestSessionLocal() as session:
        session.add_all(
            [
                ExperimentEvent(
                    experiment_key="pricing_cta_test",
                    variant_key="control",
                    session_id="session-1",
                    path="/our-pricing",
                    event_type="impression",
                    created_at=(now - timedelta(minutes=5)).replace(tzinfo=None),
                ),
                ExperimentEvent(
                    experiment_key="pricing_cta_test",
                    variant_key="control",
                    session_id="session-1",
                    path="/our-pricing",
                    event_type="cta_click",
                    created_at=(now - timedelta(minutes=4)).replace(tzinfo=None),
                ),
                ExperimentEvent(
                    experiment_key="pricing_cta_test",
                    variant_key="control",
                    session_id="session-1",
                    path="/our-pricing",
                    event_type="conversion",
                    conversion_value=Decimal("1.00"),
                    created_at=(now - timedelta(minutes=3)).replace(tzinfo=None),
                ),
                ExperimentEvent(
                    experiment_key="pricing_cta_test",
                    variant_key="meet_greet_emphasis",
                    session_id="session-2",
                    path="/our-pricing",
                    event_type="impression",
                    created_at=(now - timedelta(minutes=2)).replace(tzinfo=None),
                ),
                ExperimentEvent(
                    experiment_key="pricing_cta_test",
                    variant_key="meet_greet_emphasis",
                    session_id="session-3",
                    path="/our-pricing",
                    event_type="impression",
                    created_at=(now - timedelta(minutes=1)).replace(tzinfo=None),
                ),
            ]
        )
        await session.commit()
    response = await client.get(
        "/api/v1/experiments/results?experiment_key=pricing_cta_test",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert response.status_code == 200, response.text
    body = response.json()
    assert len(body) == 1
    assert body[0]["experiment_key"] == "pricing_cta_test"
    assert body[0]["variants"][0] == {
        "variant_key": "control",
        "impressions": 1,
        "cta_clicks": 1,
        "form_starts": 0,
        "form_submits": 0,
        "conversions": 1,
        "unique_sessions": 1,
        "conversion_rate": 1.0,
        "conversion_value_total": 1.0,
    }
@pytest.mark.asyncio
async def test_admin_can_update_backend_managed_experiment_definition(client, admin_token):
    """Admin edits to an experiment definition persist and flow through to the public registry."""
    response = await client.put(
        "/api/admin/experiments/homepage_hero_test",
        headers={"Authorization": f"Bearer {admin_token}"},
        json={
            "cookie_name": "exp_homepage_hero",
            "name": "Homepage hero test",
            "description": "Updated from admin",
            "enabled": False,
            "eligible_routes": ["/", "/contact"],
            "variants": [
                {
                    "variant_key": "control",
                    "label": "Original",
                    "allocation": 20,
                    "is_control": True,
                },
                {
                    "variant_key": "tiny_gang_social_proof",
                    "label": "Social proof",
                    "allocation": 80,
                    "is_control": False,
                },
            ],
        },
    )
    assert response.status_code == 200, response.text
    body = response.json()
    assert body["enabled"] is False
    assert body["eligible_routes"] == ["/", "/contact"]
    assert body["variants"][1]["allocation"] == 80
    # The public endpoint must reflect the admin change immediately.
    public_response = await client.get("/api/experiments")
    assert public_response.status_code == 200, public_response.text
    public_body = public_response.json()
    updated = next(item for item in public_body if item["experiment_key"] == "homepage_hero_test")
    assert updated["enabled"] is False
    assert updated["description"] == "Updated from admin"
@pytest.mark.asyncio
async def test_public_experiments_return_empty_when_globally_disabled(client):
    """With experiments globally disabled, the public registry is an empty list."""
    async with TestSessionLocal() as db:
        db.add(SiteSettings(site_name="", experiments_enabled=False))
        await db.commit()
    resp = await client.get("/api/experiments")
    assert resp.status_code == 200, resp.text
    assert resp.json() == []
@pytest.mark.asyncio
async def test_experiment_ingest_is_ignored_when_globally_disabled(client):
    """With experiments globally disabled, ingest answers 202 but drops the event."""
    async with TestSessionLocal() as session:
        session.add(SiteSettings(site_name="", experiments_enabled=False))
        await session.commit()
    now = datetime.now(timezone.utc).isoformat()
    response = await client.post(
        "/api/experiments/event",
        json={
            "experiment_key": "homepage_hero_test",
            "variant_key": "control",
            "session_id": "session_abcd1234",
            "path": "/",
            "event_name": "cta_click",
            "timestamp": now,
        },
        headers={"user-agent": "Mozilla/5.0"},
    )
    # Same soft-drop shape used for bots: 202 with accepted=False.
    assert response.status_code == 202, response.text
    assert response.json() == {"ok": True, "accepted": False}
    async with TestSessionLocal() as session:
        result = await session.execute(select(ExperimentEvent))
        assert result.scalars().all() == []
@pytest.mark.asyncio
async def test_admin_experiments_returns_404_when_globally_disabled(client, admin_token):
    """The admin experiments endpoint disappears (404) when the global flag is off."""
    async with TestSessionLocal() as session:
        session.add(SiteSettings(site_name="", experiments_enabled=False))
        await session.commit()
    response = await client.get(
        "/api/admin/experiments",
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    assert response.status_code == 404, response.text

Some files were not shown because too many files have changed in this diff Show More