From cfc193b713fff3e8be2bddd9a0d00bf58d72b111 Mon Sep 17 00:00:00 2001 From: ponzischeme89 Date: Fri, 8 May 2026 23:07:01 +1200 Subject: [PATCH] Deployment Script, Postgres migration, UX improvements --- CLAUDE.MD | 41 +- backend/app/api/mix_calculator.py | 32 +- backend/app/seed.py | 58 +- .../app/services/mix_calculator_filenames.py | 10 + backend/app/services/mix_calculator_pdf.py | 295 ++++++ .../data_entry_app_backend.egg-info/PKG-INFO | 5 +- .../SOURCES.txt | 47 + .../requires.txt | 3 + backend/pyproject.toml | 1 + backend/tests/test_costing_engine.py | 35 + deploy/Deploy.ps1 | 241 ++--- deploy/migrate-to-postgres.sh | 598 ++++++++++++ deploy/predeployment-check.sh | 306 ++++++ frontend/package.json | 8 +- .../scripts/vite-windows-eperm-workaround.cjs | 46 + frontend/src/lib/api.ts | 30 + .../src/lib/components/ClientShell.svelte | 911 ++---------------- .../MixCalculatorPrintDocument.svelte | 434 +++++++++ .../components/MixCalculatorPrintSheet.svelte | 277 +----- .../MixCalculatorEditor.svelte} | 692 ++----------- .../MixCalculatorPreviewModal.svelte | 165 ++++ .../MixCalculatorResultsPanel.svelte | 450 +++++++++ .../MixEditor.svelte} | 130 +-- .../navigation/AppNavSection.svelte | 129 +++ .../navigation/AppSecondaryRail.svelte | 200 ++++ .../navigation/AppSecondaryRailLayout.svelte | 81 ++ .../navigation/ClientPrimaryRail.svelte | 251 +++++ .../components/navigation/ClientTopbar.svelte | 395 ++++++++ .../navigation/WorkspaceSearchTrigger.svelte | 85 ++ .../src/lib/navigation/client-navigation.ts | 204 ++++ .../routes/mix-calculator/[id]/+page.svelte | 4 +- .../routes/mix-calculator/new/+page.svelte | 4 +- frontend/src/routes/mixes/[id]/+page.svelte | 4 +- frontend/src/routes/mixes/new/+page.svelte | 4 +- .../src/routes/raw-materials/+page.svelte | 247 +---- frontend/src/routes/reporting/+page.svelte | 450 +++------ frontend/src/routes/settings/+page.svelte | 232 +---- 37 files changed, 4390 insertions(+), 2715 deletions(-) create mode 100644 
backend/app/services/mix_calculator_filenames.py create mode 100644 backend/app/services/mix_calculator_pdf.py create mode 100644 deploy/migrate-to-postgres.sh create mode 100644 deploy/predeployment-check.sh create mode 100644 frontend/scripts/vite-windows-eperm-workaround.cjs create mode 100644 frontend/src/lib/components/MixCalculatorPrintDocument.svelte rename frontend/src/lib/components/{MixCalculatorWorkspace.svelte => mix-calculator/MixCalculatorEditor.svelte} (52%) create mode 100644 frontend/src/lib/components/mix-calculator/MixCalculatorPreviewModal.svelte create mode 100644 frontend/src/lib/components/mix-calculator/MixCalculatorResultsPanel.svelte rename frontend/src/lib/components/{MixWorkspace.svelte => mixes/MixEditor.svelte} (92%) create mode 100644 frontend/src/lib/components/navigation/AppNavSection.svelte create mode 100644 frontend/src/lib/components/navigation/AppSecondaryRail.svelte create mode 100644 frontend/src/lib/components/navigation/AppSecondaryRailLayout.svelte create mode 100644 frontend/src/lib/components/navigation/ClientPrimaryRail.svelte create mode 100644 frontend/src/lib/components/navigation/ClientTopbar.svelte create mode 100644 frontend/src/lib/components/navigation/WorkspaceSearchTrigger.svelte create mode 100644 frontend/src/lib/navigation/client-navigation.ts diff --git a/CLAUDE.MD b/CLAUDE.MD index efdcf11..52bf0a3 100644 --- a/CLAUDE.MD +++ b/CLAUDE.MD @@ -1247,4 +1247,43 @@ approval workflow Power BI-ready outputs ``` -That gives the client safer data entry, gives the consultancy control and visibility, and gives Power BI a clean source instead of fragile workbook logic. \ No newline at end of file +That gives the client safer data entry, gives the consultancy control and visibility, and gives Power BI a clean source instead of fragile workbook logic. 
+ +--- + +# Frontend layout debugging notes + +## Full-height layouts inside padded shells + +When a child layout uses negative margins to cancel a parent container's padding, `height: 100%` is often not enough to visually fill the container. + +Example pattern: + +```css +.parent { + --content-padding: 1.34rem; + padding: var(--content-padding); +} + +.child { + margin: calc(var(--content-padding) * -1); + height: 100%; +} +``` + +This can leave a visible gap at the bottom because the child is still only `100%` tall while being visually expanded outward by the negative margins. + +Preferred fix: + +```css +.child { + margin: calc(var(--content-padding) * -1); + height: calc(100% + (var(--content-padding) * 2)); + min-height: calc(100% + (var(--content-padding) * 2)); +} +``` + +Worker reasoning rule: + +- If a panel "almost" fills the viewport but leaves a strip equal to parent padding, inspect negative margins and the nearest padded scroll container before changing inner child heights. +- In this app, `AppSecondaryRailLayout.svelte` sits inside `ClientShell.svelte`'s padded `.content` container, so full-height fixes should account for `--content-padding`. 
diff --git a/backend/app/api/mix_calculator.py b/backend/app/api/mix_calculator.py index 38183e6..eab437b 100644 --- a/backend/app/api/mix_calculator.py +++ b/backend/app/api/mix_calculator.py @@ -1,4 +1,4 @@ -from fastapi import APIRouter, Depends, HTTPException, status +from fastapi import APIRouter, Depends, HTTPException, Response, status from sqlalchemy.orm import Session from app.api.deps import AuthSession, require_client_module_access @@ -13,14 +13,16 @@ from app.schemas.mix_calculator import ( ) from app.services.mix_calculator_service import ( build_mix_calculator_options, + can_view_all_mix_calculator_sessions, calculate_mix_calculator_preview, - serialize_mix_calculator_session, create_mix_calculator_session, get_mix_calculator_session, - update_mix_calculator_session, list_mix_calculator_sessions, - can_view_all_mix_calculator_sessions, + serialize_mix_calculator_session, + update_mix_calculator_session, ) +from app.services.mix_calculator_pdf import MixCalculatorPdfUnavailableError, build_mix_calculator_pdf +from app.services.mix_calculator_filenames import mix_calculator_pdf_filename router = APIRouter(prefix="/api/mix-calculator", tags=["mix-calculator"]) @@ -77,6 +79,28 @@ def read_mix_calculator_session( return serialize_mix_calculator_session(session_record, session) +@router.get("/{session_id}/pdf") +def download_mix_calculator_session_pdf( + session_id: int, + session: AuthSession = Depends(require_client_module_access("mix_calculator")), + db: Session = Depends(get_db), +): + session_record = get_mix_calculator_session(db, auth_session=session, session_id=session_id) + if session_record is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mix calculator session not found") + + try: + pdf_bytes = build_mix_calculator_pdf(session_record) + except MixCalculatorPdfUnavailableError as exc: + raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=str(exc)) from exc + filename = 
mix_calculator_pdf_filename(session_record) + return Response( + content=pdf_bytes, + media_type="application/pdf", + headers={"Content-Disposition": f'attachment; filename="{filename}"'}, + ) + + @router.patch("/{session_id}", response_model=MixCalculatorSessionRead) def patch_mix_calculator_session( session_id: int, diff --git a/backend/app/seed.py b/backend/app/seed.py index 27da275..1dbf349 100644 --- a/backend/app/seed.py +++ b/backend/app/seed.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections import Counter from datetime import date, datetime import logging +import os from pathlib import Path import re @@ -22,10 +23,48 @@ from app.services.client_access_service import MODULE_CATALOG, default_access_le TENANT_ID = "hunter-premium-produce" WORKBOOK_EFFECTIVE_DATE = date(2025, 9, 1) WORKBOOK_SENTINEL_ITEM_ID = "404266" -WORKBOOK_PATH = Path(__file__).resolve().parents[2] / "Input Cost Spreadsheet(1).xlsx" +WORKBOOK_FILENAME = "Input Cost Spreadsheet(1).xlsx" logger = logging.getLogger("data_entry_app.seed") +def _workbook_candidates() -> list[Path]: + env_value = os.getenv("WORKBOOK_PATH") + env_path = env_value.strip() if isinstance(env_value, str) and env_value.strip() else None + repo_root = Path(__file__).resolve().parents[2] + cwd = Path.cwd() + + candidates = [ + Path(env_path) if env_path else None, + Path("/srv/lean101-clients") / WORKBOOK_FILENAME, + repo_root / WORKBOOK_FILENAME, + cwd / WORKBOOK_FILENAME, + Path("/app") / WORKBOOK_FILENAME, + Path("/") / WORKBOOK_FILENAME, + ] + + ordered: list[Path] = [] + seen: set[str] = set() + for candidate in candidates: + if candidate is None: + continue + key = str(candidate) + if key in seen: + continue + seen.add(key) + ordered.append(candidate) + return ordered + + +def _resolve_workbook_path() -> Path: + for candidate in _workbook_candidates(): + if candidate.exists(): + return candidate + return _workbook_candidates()[0] + + +WORKBOOK_PATH = _resolve_workbook_path() + + + def 
_text(value) -> str | None: if value is None: return None @@ -129,9 +168,12 @@ def _build_process_key(label, grading_cost: float, bagging_cost: float, cracking def _load_workbook(): - if not WORKBOOK_PATH.exists(): - raise FileNotFoundError(f"Workbook not found at {WORKBOOK_PATH}") - return load_workbook(WORKBOOK_PATH, data_only=True) + workbook_path = _resolve_workbook_path() + if not workbook_path.exists(): + raise FileNotFoundError( + f"Workbook not found. Checked: {', '.join(str(path) for path in _workbook_candidates())}" + ) + return load_workbook(workbook_path, data_only=True) def _read_raw_material_rows(workbook) -> list[dict]: @@ -684,10 +726,14 @@ def seed_costing_workspace(db): def seed_if_empty(): Base.metadata.create_all(bind=engine) with SessionLocal() as db: - if WORKBOOK_PATH.exists(): + workbook_path = _resolve_workbook_path() + if workbook_path.exists(): seed_costing_workspace(db) else: - logger.warning("Skipping costing workspace seed because workbook is missing at %s", WORKBOOK_PATH) + logger.warning( + "Skipping costing workspace seed because workbook is missing. 
Checked: %s", + ", ".join(str(path) for path in _workbook_candidates()), + ) seed_client_access(db) seed_access(db) db.commit() diff --git a/backend/app/services/mix_calculator_filenames.py b/backend/app/services/mix_calculator_filenames.py new file mode 100644 index 0000000..b2575ea --- /dev/null +++ b/backend/app/services/mix_calculator_filenames.py @@ -0,0 +1,10 @@ +from __future__ import annotations + +import re + +from app.models.mix_calculator import MixCalculatorSession + + +def mix_calculator_pdf_filename(session_record: MixCalculatorSession) -> str: + raw = f"{session_record.session_number}_{session_record.client_name}_{session_record.product_name}.pdf" + return re.sub(r"[^\w.\-]+", "_", raw) diff --git a/backend/app/services/mix_calculator_pdf.py b/backend/app/services/mix_calculator_pdf.py new file mode 100644 index 0000000..309d40b --- /dev/null +++ b/backend/app/services/mix_calculator_pdf.py @@ -0,0 +1,295 @@ +from __future__ import annotations + +from io import BytesIO +from math import ceil + +from app.models.mix_calculator import MixCalculatorSession + + +class MixCalculatorPdfUnavailableError(RuntimeError): + pass + + +def _fmt_number(value: float, digits: int = 2) -> str: + return f"{value:.{digits}f}" + + +def _fractional_bag_warning(session_record: MixCalculatorSession) -> str | None: + rounded_bags = round(session_record.total_bags) + if abs(session_record.total_bags - rounded_bags) < 1e-9: + return None + return ( + f"Batch size {session_record.batch_size_kg:g}kg produces {session_record.total_bags:.2f} bags " + f"for {session_record.product_unit_of_measure}. This is not a whole-bag quantity." 
+ ) + + +def build_mix_calculator_pdf(session_record: MixCalculatorSession) -> bytes: + try: + from reportlab.lib import colors + from reportlab.lib.pagesizes import A4 + from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet + from reportlab.lib.units import mm + from reportlab.platypus import Paragraph, SimpleDocTemplate, Spacer, Table, TableStyle + except ModuleNotFoundError as exc: + raise MixCalculatorPdfUnavailableError( + "PDF generation is unavailable because 'reportlab' is not installed. " + "Install backend dependencies again to enable PDF export." + ) from exc + + buffer = BytesIO() + document = SimpleDocTemplate( + buffer, + pagesize=A4, + leftMargin=14 * mm, + rightMargin=14 * mm, + topMargin=14 * mm, + bottomMargin=14 * mm, + title=f"{session_record.session_number} - {session_record.product_name}", + author="Lean 101 Clients", + ) + + styles = getSampleStyleSheet() + eyebrow = ParagraphStyle( + "Eyebrow", + parent=styles["BodyText"], + fontName="Helvetica-Bold", + fontSize=8, + leading=10, + textColor=colors.HexColor("#62736B"), + spaceAfter=5, + ) + title = ParagraphStyle( + "Title", + parent=styles["Heading1"], + fontName="Helvetica-Bold", + fontSize=24, + leading=26, + textColor=colors.HexColor("#21312A"), + spaceAfter=6, + ) + subtitle = ParagraphStyle( + "Subtitle", + parent=styles["BodyText"], + fontName="Helvetica", + fontSize=10, + leading=13, + textColor=colors.HexColor("#6B7A73"), + ) + label = ParagraphStyle( + "Label", + parent=styles["BodyText"], + fontName="Helvetica-Bold", + fontSize=7, + leading=9, + textColor=colors.HexColor("#6B7A73"), + ) + value = ParagraphStyle( + "Value", + parent=styles["BodyText"], + fontName="Helvetica-Bold", + fontSize=11, + leading=13, + textColor=colors.HexColor("#21312A"), + ) + card_value = ParagraphStyle( + "CardValue", + parent=value, + fontSize=16, + leading=18, + ) + body = ParagraphStyle( + "Body", + parent=styles["BodyText"], + fontName="Helvetica", + fontSize=9, + leading=12, + 
textColor=colors.HexColor("#304038"), + ) + section_title = ParagraphStyle( + "SectionTitle", + parent=styles["Heading2"], + fontName="Helvetica-Bold", + fontSize=13, + leading=15, + textColor=colors.HexColor("#21312A"), + ) + + warnings = [] + bag_warning = _fractional_bag_warning(session_record) + if bag_warning: + warnings.append(bag_warning) + + story = [ + Paragraph(f"Mix Calculator | {session_record.session_number}", eyebrow), + Paragraph(session_record.product_name, title), + Paragraph(f"{session_record.client_name}  ·  {session_record.mix_name}", subtitle), + Spacer(1, 8), + ] + + header_table = Table( + [ + [ + [ + Paragraph("Mix date", label), + Paragraph(session_record.mix_date.strftime("%d %b %Y"), value), + ], + [ + Paragraph("Prepared by", label), + Paragraph(session_record.prepared_by_name, value), + ], + [ + Paragraph("Status", label), + Paragraph(session_record.status.title(), value), + ], + ] + ], + colWidths=[60 * mm, 60 * mm, 52 * mm], + ) + header_table.setStyle( + TableStyle( + [ + ("VALIGN", (0, 0), (-1, -1), "TOP"), + ("BOX", (0, 0), (-1, -1), 0.8, colors.HexColor("#DBE4DE")), + ("INNERGRID", (0, 0), (-1, -1), 0.8, colors.HexColor("#DBE4DE")), + ("BACKGROUND", (0, 0), (-1, -1), colors.white), + ("LEFTPADDING", (0, 0), (-1, -1), 10), + ("RIGHTPADDING", (0, 0), (-1, -1), 10), + ("TOPPADDING", (0, 0), (-1, -1), 9), + ("BOTTOMPADDING", (0, 0), (-1, -1), 9), + ] + ) + ) + story.extend([header_table, Spacer(1, 10)]) + + summary_table = Table( + [ + [ + [Paragraph("Batch size", label), Paragraph(f"{_fmt_number(session_record.batch_size_kg)}kg", card_value)], + [Paragraph("Total output", label), Paragraph(f"{_fmt_number(session_record.total_kg)}kg", card_value)], + [Paragraph("Bags", label), Paragraph(_fmt_number(session_record.total_bags), card_value)], + [Paragraph("Unit pack", label), Paragraph(f"{_fmt_number(session_record.product_unit_size_kg)}kg", card_value)], + ] + ], + colWidths=[43 * mm, 43 * mm, 43 * mm, 43 * mm], + ) + 
summary_table.setStyle( + TableStyle( + [ + ("BOX", (0, 0), (-1, -1), 0.8, colors.HexColor("#DBE4DE")), + ("INNERGRID", (0, 0), (-1, -1), 0.8, colors.HexColor("#DBE4DE")), + ("BACKGROUND", (0, 0), (-1, -1), colors.HexColor("#F9FBFA")), + ("VALIGN", (0, 0), (-1, -1), "TOP"), + ("LEFTPADDING", (0, 0), (-1, -1), 10), + ("RIGHTPADDING", (0, 0), (-1, -1), 10), + ("TOPPADDING", (0, 0), (-1, -1), 10), + ("BOTTOMPADDING", (0, 0), (-1, -1), 10), + ] + ) + ) + story.extend([summary_table, Spacer(1, 10)]) + + detail_table = Table( + [ + [ + [Paragraph("Mix source", label), Paragraph(session_record.mix_name, value), Paragraph(f"Saved against {session_record.product_unit_of_measure} units.", body)], + [Paragraph("Composition", label), Paragraph(f"{_fmt_number(sum(line.mix_percentage for line in session_record.lines))}%", value), Paragraph(f"{len(session_record.lines)} raw material{'s' if len(session_record.lines) != 1 else ''} in the blend.", body)], + [Paragraph("Estimated pages", label), Paragraph(str(max(1, ceil(len(session_record.lines) / 18))), value), Paragraph("Formatted for A4 PDF export.", body)], + ] + ], + colWidths=[60 * mm, 60 * mm, 52 * mm], + ) + detail_table.setStyle( + TableStyle( + [ + ("BACKGROUND", (0, 0), (-1, -1), colors.HexColor("#F4F8F5")), + ("VALIGN", (0, 0), (-1, -1), "TOP"), + ("LEFTPADDING", (0, 0), (-1, -1), 10), + ("RIGHTPADDING", (0, 0), (-1, -1), 10), + ("TOPPADDING", (0, 0), (-1, -1), 10), + ("BOTTOMPADDING", (0, 0), (-1, -1), 10), + ] + ) + ) + story.extend([detail_table, Spacer(1, 10)]) + + if session_record.notes: + notes_table = Table( + [[Paragraph("Notes", label)], [Paragraph(session_record.notes.replace("\n", "
"), body)]], + colWidths=[172 * mm], + ) + notes_table.setStyle( + TableStyle( + [ + ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#F4F8F5")), + ("BOX", (0, 0), (-1, -1), 0.8, colors.HexColor("#DBE4DE")), + ("LEFTPADDING", (0, 0), (-1, -1), 10), + ("RIGHTPADDING", (0, 0), (-1, -1), 10), + ("TOPPADDING", (0, 0), (-1, -1), 8), + ("BOTTOMPADDING", (0, 0), (-1, -1), 8), + ] + ) + ) + story.extend([notes_table, Spacer(1, 10)]) + + if warnings: + warning_rows = [[Paragraph("Warnings", label)]] + warning_rows.extend([[Paragraph(warning, body)] for warning in warnings]) + warnings_table = Table(warning_rows, colWidths=[172 * mm]) + warnings_table.setStyle( + TableStyle( + [ + ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#FFF5E6")), + ("BACKGROUND", (0, 1), (-1, -1), colors.HexColor("#FFF9EF")), + ("BOX", (0, 0), (-1, -1), 0.8, colors.HexColor("#E8C483")), + ("LEFTPADDING", (0, 0), (-1, -1), 10), + ("RIGHTPADDING", (0, 0), (-1, -1), 10), + ("TOPPADDING", (0, 0), (-1, -1), 8), + ("BOTTOMPADDING", (0, 0), (-1, -1), 8), + ] + ) + ) + story.extend([warnings_table, Spacer(1, 10)]) + + story.extend( + [ + Paragraph("Required Raw Materials", label), + Paragraph("Blend composition", section_title), + Paragraph(f"{session_record.product_unit_of_measure} · {_fmt_number(session_record.product_unit_size_kg)}kg per unit", subtitle), + Spacer(1, 6), + ] + ) + + table_rows = [["Raw material", "Mix %", "Required kg", "Unit"]] + for line in session_record.lines: + table_rows.append( + [ + Paragraph(f"{line.raw_material_name}", body), + Paragraph(f"{_fmt_number(line.mix_percentage)}%", body), + Paragraph(f"{_fmt_number(line.required_kg)}kg", body), + Paragraph(line.unit, body), + ] + ) + + composition_table = Table(table_rows, colWidths=[88 * mm, 24 * mm, 34 * mm, 26 * mm], repeatRows=1) + composition_table.setStyle( + TableStyle( + [ + ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#EEF4F0")), + ("TEXTCOLOR", (0, 0), (-1, 0), colors.HexColor("#4F6158")), + ("FONTNAME", (0, 0), (-1, 
0), "Helvetica-Bold"), + ("FONTSIZE", (0, 0), (-1, 0), 8), + ("BOTTOMPADDING", (0, 0), (-1, 0), 8), + ("TOPPADDING", (0, 0), (-1, 0), 8), + ("LEFTPADDING", (0, 0), (-1, -1), 9), + ("RIGHTPADDING", (0, 0), (-1, -1), 9), + ("GRID", (0, 0), (-1, -1), 0.6, colors.HexColor("#DBE4DE")), + ("VALIGN", (0, 0), (-1, -1), "TOP"), + ("ROWBACKGROUNDS", (0, 1), (-1, -1), [colors.white, colors.HexColor("#FBFCFB")]), + ] + ) + ) + story.append(composition_table) + + document.build(story) + return buffer.getvalue() diff --git a/backend/data_entry_app_backend.egg-info/PKG-INFO b/backend/data_entry_app_backend.egg-info/PKG-INFO index 4f99b73..528dafb 100644 --- a/backend/data_entry_app_backend.egg-info/PKG-INFO +++ b/backend/data_entry_app_backend.egg-info/PKG-INFO @@ -1,10 +1,13 @@ Metadata-Version: 2.4 Name: data-entry-app-backend -Version: 0.1.2 +Version: 0.1.5 Summary: Costing platform MVP backend Requires-Python: >=3.11 Requires-Dist: fastapi<1.0,>=0.115 +Requires-Dist: openpyxl<4.0,>=3.1 Requires-Dist: uvicorn[standard]<1.0,>=0.30 Requires-Dist: sqlalchemy<3.0,>=2.0 Requires-Dist: pydantic<3.0,>=2.8 Requires-Dist: pytest<9.0,>=8.0 +Requires-Dist: psycopg[binary]<4.0,>=3.2 +Requires-Dist: reportlab<5.0,>=4.2 diff --git a/backend/data_entry_app_backend.egg-info/SOURCES.txt b/backend/data_entry_app_backend.egg-info/SOURCES.txt index 2082ccb..b7354a9 100644 --- a/backend/data_entry_app_backend.egg-info/SOURCES.txt +++ b/backend/data_entry_app_backend.egg-info/SOURCES.txt @@ -2,33 +2,80 @@ pyproject.toml ./app/__init__.py ./app/main.py ./app/seed.py +./app/seed_access.py ./app/api/__init__.py +./app/api/access.py +./app/api/auth.py +./app/api/client_access.py +./app/api/dashboard.py +./app/api/deps.py +./app/api/mix_calculator.py ./app/api/mixes.py ./app/api/powerbi.py ./app/api/products.py ./app/api/raw_materials.py ./app/api/scenarios.py ./app/core/__init__.py +./app/core/access.py ./app/core/config.py +./app/core/security.py ./app/db/__init__.py +./app/db/migrations.py 
./app/db/session.py ./app/models/__init__.py +./app/models/access.py ./app/models/assumption.py +./app/models/client_access.py ./app/models/mix.py +./app/models/mix_calculator.py ./app/models/product.py ./app/models/raw_material.py ./app/models/scenario.py ./app/schemas/__init__.py +./app/schemas/client_access.py ./app/schemas/mix.py +./app/schemas/mix_calculator.py ./app/schemas/product.py ./app/schemas/raw_material.py ./app/schemas/scenario.py ./app/services/__init__.py +./app/services/client_access_service.py ./app/services/costing_engine.py +./app/services/mix_calculator_filenames.py +./app/services/mix_calculator_pdf.py +./app/services/mix_calculator_service.py ./app/services/scenario_engine.py +app/__init__.py +app/main.py +app/seed.py +app/api/__init__.py +app/api/mixes.py +app/api/powerbi.py +app/api/products.py +app/api/raw_materials.py +app/api/scenarios.py +app/core/__init__.py +app/core/config.py +app/db/__init__.py +app/db/session.py +app/models/__init__.py +app/models/assumption.py +app/models/mix.py +app/models/product.py +app/models/raw_material.py +app/models/scenario.py +app/schemas/__init__.py +app/schemas/mix.py +app/schemas/product.py +app/schemas/raw_material.py +app/schemas/scenario.py +app/services/__init__.py +app/services/costing_engine.py +app/services/scenario_engine.py data_entry_app_backend.egg-info/PKG-INFO data_entry_app_backend.egg-info/SOURCES.txt data_entry_app_backend.egg-info/dependency_links.txt data_entry_app_backend.egg-info/requires.txt data_entry_app_backend.egg-info/top_level.txt +tests/test_access.py tests/test_costing_engine.py \ No newline at end of file diff --git a/backend/data_entry_app_backend.egg-info/requires.txt b/backend/data_entry_app_backend.egg-info/requires.txt index a9fc968..c97d2eb 100644 --- a/backend/data_entry_app_backend.egg-info/requires.txt +++ b/backend/data_entry_app_backend.egg-info/requires.txt @@ -1,5 +1,8 @@ fastapi<1.0,>=0.115 +openpyxl<4.0,>=3.1 uvicorn[standard]<1.0,>=0.30 
sqlalchemy<3.0,>=2.0 pydantic<3.0,>=2.8 pytest<9.0,>=8.0 +psycopg[binary]<4.0,>=3.2 +reportlab<5.0,>=4.2 diff --git a/backend/pyproject.toml b/backend/pyproject.toml index f163d99..3940c5b 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -15,6 +15,7 @@ dependencies = [ "pydantic>=2.8,<3.0", "pytest>=8.0,<9.0", "psycopg[binary]>=3.2,<4.0", + "reportlab>=4.2,<5.0", ] [tool.setuptools] diff --git a/backend/tests/test_costing_engine.py b/backend/tests/test_costing_engine.py index 0e5cc26..abbb8e9 100644 --- a/backend/tests/test_costing_engine.py +++ b/backend/tests/test_costing_engine.py @@ -344,6 +344,41 @@ def test_mix_calculator_endpoints_respect_owner_visibility(): assert operator_detail_response.status_code == 404 +def test_mix_calculator_pdf_endpoint_returns_pdf(): + with TestClient(app) as client: + superadmin_login = client.post( + "/api/auth/client/login", + json={"email": settings.client_email, "password": settings.client_password}, + ) + headers = {"Authorization": f"Bearer {superadmin_login.json()['token']}"} + + options_response = client.get("/api/mix-calculator/options", headers=headers) + seeded_product = next( + product for product in options_response.json()["products"] if product["product_name"] == "Specialty Pigeon Breeder 20kg" + ) + + create_response = client.post( + "/api/mix-calculator", + json={ + "mix_date": "2026-04-29", + "client_name": seeded_product["client_name"], + "product_id": seeded_product["product_id"], + "batch_size_kg": 560, + "prepared_by_name": "Amelia Hart", + "notes": "Morning production run", + }, + headers=headers, + ) + created = create_response.json() + + pdf_response = client.get(f"/api/mix-calculator/{created['id']}/pdf", headers=headers) + + assert pdf_response.status_code == 200 + assert pdf_response.headers["content-type"] == "application/pdf" + assert "attachment;" in pdf_response.headers["content-disposition"] + assert pdf_response.content.startswith(b"%PDF") + + def 
test_module_permission_blocks_client_module_access(): with TestClient(app) as client: admin_login_response = client.post( diff --git a/deploy/Deploy.ps1 b/deploy/Deploy.ps1 index 368076a..3bcb93e 100644 --- a/deploy/Deploy.ps1 +++ b/deploy/Deploy.ps1 @@ -1,219 +1,182 @@ <# .SYNOPSIS - Build and deploy the Lean 101 Clients app to a Digital Ocean droplet over SSH. + Deploy the Lean 101 Clients app to a Digital Ocean droplet over SSH. .DESCRIPTION - Runs `docker compose` against `docker-compose.production.yml` on the remote - host. The same script handles first-time bootstrap and subsequent updates: + Tars the local source tree, uploads it to the droplet, and runs + docker compose up --build. No git required on the server. - * On bootstrap (-Bootstrap): creates the remote directory, clones the - repo (or updates if already present), uploads the local env file, and - brings the stack up with `docker compose ... up -d --build`. - - * On update (default): SSHes to the host, fetches the requested branch, - uploads a refreshed env file (if changed), then runs - `docker compose ... up -d --build` followed by a healthcheck. - - The script never executes destructive commands without asking, except for - recreating containers (which preserves the named Postgres volume). + The same script handles first-time setup and subsequent updates. .PARAMETER RemoteHost Hostname or IP of the Digital Ocean droplet. Required. .PARAMETER RemoteUser - SSH user on the droplet. Defaults to `root`. + SSH user. Defaults to 'root'. .PARAMETER RemotePath - Absolute path on the droplet where the repo lives. Defaults to - `/srv/lean101-clients`. - -.PARAMETER Branch - Git branch to deploy. Defaults to `main`. - -.PARAMETER RepoUrl - Git URL used during bootstrap when the remote directory is empty. - Required only with -Bootstrap. + Absolute path on the droplet. Defaults to '/srv/lean101-clients'. .PARAMETER EnvFile - Local path to the env file that should land on the droplet as - `/.env.production`. 
Defaults to `.env.production`. + Local path to the production env file. Defaults to '.env.production'. .PARAMETER SshKey - Optional path to an SSH private key. If omitted, the script relies on - ssh-agent / default keys. + Optional path to an SSH private key. .PARAMETER ComposeFile - Compose file name on the remote host. Defaults to - `docker-compose.production.yml`. - -.PARAMETER Bootstrap - Run first-time setup (clone, upload env, build, up). - -.PARAMETER SkipBuild - Pass `--no-build` to docker compose (use when only env changed). + Compose file name on the remote host. Defaults to 'docker-compose.production.yml'. .PARAMETER Seed - Run `python -m app.seed` inside the backend container after the stack is up. + Run 'python -m app.seed' inside the backend container after the stack is up. .PARAMETER Logs - After deploy, tail logs for ~20 lines so you can verify the stack came up. + Tail logs for ~60 lines after deploy to verify the stack came up. + +.PARAMETER SkipBuild + Pass --no-build to docker compose (use when only env changed). 
.EXAMPLE - ./deploy/Deploy.ps1 -RemoteHost 203.0.113.10 -Bootstrap -RepoUrl git@github.com:ponzischeme89/data-entry-app.git + ./deploy/Deploy.ps1 -RemoteHost 209.38.24.231 .EXAMPLE - ./deploy/Deploy.ps1 -RemoteHost 203.0.113.10 + ./deploy/Deploy.ps1 -RemoteHost 209.38.24.231 -Seed -Logs #> [CmdletBinding()] param( [Parameter(Mandatory = $true)] [string] $RemoteHost, - [string] $RemoteUser = "root", - [string] $RemotePath = "/srv/lean101-clients", - [string] $Branch = "main", - [string] $RepoUrl, - [string] $EnvFile = ".env.production", + [string] $RemoteUser = "root", + [string] $RemotePath = "/srv/lean101-clients", + [string] $EnvFile = ".env.production", [string] $SshKey, - [string] $ComposeFile = "docker-compose.production.yml", - [switch] $Bootstrap, - [switch] $SkipBuild, + [string] $ComposeFile = "docker-compose.production.yml", [switch] $Seed, - [switch] $Logs + [switch] $Logs, + [switch] $SkipBuild ) $ErrorActionPreference = "Stop" Set-StrictMode -Version Latest -function Write-Step($message) { - Write-Host "==> $message" -ForegroundColor Cyan +# ── Helpers ─────────────────────────────────────────────────────────────────── +function Write-Step($msg) { Write-Host "==> $msg" -ForegroundColor Cyan } +function Write-Warn($msg) { Write-Host "!! $msg" -ForegroundColor Yellow } + +function Get-RepoRoot { + $dir = Split-Path -Parent $PSScriptRoot + if (-not $dir) { $dir = (Get-Location).Path } + return $dir } -function Write-Warn($message) { - Write-Host "!! 
$message" -ForegroundColor Yellow +$RepoRoot = Get-RepoRoot +$SshTarget = "$RemoteUser@$RemoteHost" +$SshOpts = @("-o", "StrictHostKeyChecking=accept-new", "-o", "BatchMode=no") +if ($SshKey) { $SshOpts += @("-i", $SshKey) } + +function Invoke-Ssh([string] $cmd) { + & ssh @SshOpts $SshTarget $cmd + if ($LASTEXITCODE -ne 0) { throw "Remote command failed (exit $LASTEXITCODE): $cmd" } } -function Resolve-RepoRoot { - $scriptDir = Split-Path -Parent $MyInvocation.ScriptName - if (-not $scriptDir) { $scriptDir = $PSScriptRoot } - return (Resolve-Path (Join-Path $scriptDir "..")).Path +function Invoke-Scp([string] $local, [string] $remote) { + & scp @SshOpts $local "${SshTarget}:${remote}" + if ($LASTEXITCODE -ne 0) { throw "scp failed: $local -> $remote" } } -$RepoRoot = Resolve-RepoRoot +# ── Resolve paths ───────────────────────────────────────────────────────────── Push-Location $RepoRoot try { - $envPath = if ([System.IO.Path]::IsPathRooted($EnvFile)) { $EnvFile } else { Join-Path $RepoRoot $EnvFile } - if (-not (Test-Path $envPath)) { - throw "Env file not found at '$envPath'. Copy .env.production.example to $EnvFile and fill in production secrets first." + $EnvPath = if ([System.IO.Path]::IsPathRooted($EnvFile)) { $EnvFile } else { Join-Path $RepoRoot $EnvFile } + if (-not (Test-Path $EnvPath)) { + throw "Env file not found at '$EnvPath'. Copy .env.production.example and fill in secrets." 
} - $sshTarget = "$RemoteUser@$RemoteHost" - $sshOpts = @("-o", "StrictHostKeyChecking=accept-new") - if ($SshKey) { $sshOpts += @("-i", $SshKey) } - - function Invoke-Ssh([string] $remoteCommand) { - & ssh @sshOpts $sshTarget $remoteCommand - if ($LASTEXITCODE -ne 0) { - throw "Remote command failed (exit $LASTEXITCODE): $remoteCommand" - } - } - - function Invoke-Scp([string] $localPath, [string] $remoteDest) { - & scp @sshOpts $localPath "$($sshTarget):$remoteDest" - if ($LASTEXITCODE -ne 0) { - throw "scp failed for $localPath -> $remoteDest" - } - } - - Write-Step "Verifying SSH connectivity to $sshTarget" + # ── Connectivity check ────────────────────────────────────────────────────── + Write-Step "Checking SSH connectivity to $SshTarget" Invoke-Ssh "echo connected as `$(whoami) on `$(hostname)" - Write-Step "Verifying Docker is installed on the droplet" - Invoke-Ssh "command -v docker >/dev/null 2>&1 && docker --version && docker compose version" + # ── Package source files ──────────────────────────────────────────────────── + Write-Step "Packaging source files (excluding node_modules, caches, etc.)" - if ($Bootstrap) { - if (-not $RepoUrl) { - throw "-RepoUrl is required when using -Bootstrap." - } - Write-Step "Bootstrapping $RemotePath from $RepoUrl ($Branch)" - $bootstrapScript = @" -set -euo pipefail -mkdir -p '$RemotePath' -cd '$RemotePath' -if [ ! -d .git ]; then - git clone --branch '$Branch' '$RepoUrl' . 
-else - git remote set-url origin '$RepoUrl' - git fetch origin '$Branch' - git checkout '$Branch' - git reset --hard 'origin/$Branch' -fi -"@ - Invoke-Ssh $bootstrapScript - } else { - Write-Step "Updating $RemotePath to latest $Branch" - $updateScript = @" -set -euo pipefail -cd '$RemotePath' -git fetch origin '$Branch' -git checkout '$Branch' -git reset --hard 'origin/$Branch' -"@ - Invoke-Ssh $updateScript - } + $TarFile = Join-Path $env:TEMP "lean101-deploy-$(Get-Date -Format 'yyyyMMdd-HHmmss').tar.gz" - Write-Step "Uploading $EnvFile to $RemotePath/.env.production" - Invoke-Scp $envPath "$RemotePath/.env.production" + $excludes = @( + "--exclude=./node_modules", + "--exclude=./frontend/node_modules", + "--exclude=./frontend/.svelte-kit", + "--exclude=./frontend/build", + "--exclude=./.git", + "--exclude=./__pycache__", + "--exclude=./backend/__pycache__", + "--exclude=./backend/app/__pycache__", + "--exclude=./**/__pycache__", + "--exclude=./*.pyc", + "--exclude=./.env", + "--exclude=./.env.production", + "--exclude=./.env.alpha", + "--exclude=./data_entry_app.db", + "--exclude=./*.db" + ) + + & tar -czf $TarFile @excludes -C $RepoRoot . 
+ if ($LASTEXITCODE -ne 0) { throw "tar failed" } + + $TarSize = [math]::Round((Get-Item $TarFile).Length / 1MB, 1) + Write-Host " Archive: $TarFile ($TarSize MB)" + + # ── Upload env file ───────────────────────────────────────────────────────── + Write-Step "Uploading env file" + Invoke-Scp $EnvPath "$RemotePath/.env.production" Invoke-Ssh "chmod 600 '$RemotePath/.env.production'" - $composeArgs = @( - "--env-file", ".env.production", - "-f", $ComposeFile - ) -join " " + # ── Upload and extract source ──────────────────────────────────────────────── + Write-Step "Uploading source archive" + Invoke-Scp $TarFile "/tmp/lean101-deploy.tar.gz" + Remove-Item $TarFile -Force - $buildFlag = if ($SkipBuild) { "" } else { "--build" } + Write-Step "Extracting on server" + Invoke-Ssh "mkdir -p '$RemotePath' && tar -xzf /tmp/lean101-deploy.tar.gz -C '$RemotePath' && rm /tmp/lean101-deploy.tar.gz" - Write-Step "Pulling base images" - Invoke-Ssh "cd '$RemotePath' && docker compose $composeArgs pull --ignore-pull-failures || true" + # ── Docker compose up ─────────────────────────────────────────────────────── + $ComposeArgs = "--env-file .env.production -f $ComposeFile" + $BuildFlag = if ($SkipBuild) { "--no-build" } else { "--build" } - Write-Step "Bringing the stack up (build=$([bool](-not $SkipBuild)))" - Invoke-Ssh "cd '$RemotePath' && docker compose $composeArgs up -d $buildFlag --remove-orphans" + Write-Step "Bringing stack up (build=$(-not $SkipBuild))" + Invoke-Ssh "cd '$RemotePath' && docker compose $ComposeArgs up -d $BuildFlag --remove-orphans" - Write-Step "Waiting for backend healthcheck" + # ── Health check ──────────────────────────────────────────────────────────── + Write-Step "Waiting for backend health check" $healthScript = @" set -e cd '$RemotePath' -for attempt in `$(seq 1 30); do +for i in `$(seq 1 30); do status=`$(docker inspect --format='{{if .State.Health}}{{.State.Health.Status}}{{else}}{{.State.Status}}{{end}}' lean101-clients-backend 2>/dev/null || 
echo missing) case "`$status" in - healthy|running) - echo "backend is `$status" - exit 0;; - *) - printf '.' - sleep 4;; + healthy|running) echo "backend is `$status"; exit 0 ;; + *) printf '.'; sleep 4 ;; esac done -echo -echo 'backend did not become healthy in time' >&2 -docker compose $composeArgs ps backend -exit 1 +echo; echo 'backend did not become healthy in time' >&2; exit 1 "@ Invoke-Ssh $healthScript + # ── Optional seed ─────────────────────────────────────────────────────────── if ($Seed) { Write-Step "Seeding reference data" - Invoke-Ssh "cd '$RemotePath' && docker compose $composeArgs exec -T backend python -m app.seed" + Invoke-Ssh "cd '$RemotePath' && docker compose $ComposeArgs exec -T backend python -m app.seed" } - Write-Step "Final container status" - Invoke-Ssh "cd '$RemotePath' && docker compose $composeArgs ps" + # ── Final status ──────────────────────────────────────────────────────────── + Write-Step "Stack status" + Invoke-Ssh "cd '$RemotePath' && docker compose $ComposeArgs ps" if ($Logs) { Write-Step "Recent logs (last 60 lines)" - Invoke-Ssh "cd '$RemotePath' && docker compose $composeArgs logs --tail=60" + Invoke-Ssh "cd '$RemotePath' && docker compose $ComposeArgs logs --tail=60" } - Write-Host "Deployment complete." -ForegroundColor Green + Write-Host "" + Write-Host "Deployment complete -> https://clients.lean-101.com.au" -ForegroundColor Green } finally { Pop-Location diff --git a/deploy/migrate-to-postgres.sh b/deploy/migrate-to-postgres.sh new file mode 100644 index 0000000..b76fb88 --- /dev/null +++ b/deploy/migrate-to-postgres.sh @@ -0,0 +1,598 @@ +#!/usr/bin/env bash +# ============================================================================= +# migrate-to-postgres.sh +# +# Migrates the lean101-clients production stack from SQLite to PostgreSQL. +# Safe: backs up everything before touching anything. Old stack stays live +# until you confirm migration succeeded. +# +# HOW TO USE: +# 1. 
Copy this file to the server:
+#      scp deploy/migrate-to-postgres.sh root@<droplet-ip>:/srv/lean101-clients/deploy/
+#
+#   2. SSH in and run it directly (must be interactive — not piped):
+#      ssh root@<droplet-ip>
+#      bash /srv/lean101-clients/deploy/migrate-to-postgres.sh
+# =============================================================================
+
+set -euo pipefail
+
+# ── Config ────────────────────────────────────────────────────────────────────
+WORK_DIR="/srv/lean101-clients"
+BACKEND="lean101-clients-backend"
+FRONTEND="lean101-clients-frontend"
+NGINX="lean101-clients"
+OLD_COMPOSE="docker-compose.yml"
+NEW_COMPOSE="docker-compose.production.yml"
+TIMESTAMP=$(date +%Y%m%d-%H%M%S)
+BACKUP_DIR="/srv/lean101-clients-backup-$TIMESTAMP"
+MIGRATE_SCRIPT="/tmp/lean101_migrate_data.py"
+PG_DB_SERVICE="lean101-clients-db"
+PG_IMAGE="postgres:16-alpine"
+
+# ── Colour helpers ────────────────────────────────────────────────────────────
+RED='\033[0;31m'; YELLOW='\033[1;33m'; GREEN='\033[0;32m'
+CYAN='\033[0;36m'; BOLD='\033[1m'; RESET='\033[0m'
+
+sep() { echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"; }
+h1() { sep; echo -e "${BOLD}${CYAN} $1${RESET}"; sep; }
+ok() { echo -e " ${GREEN}✔${RESET} $1"; }
+warn() { echo -e " ${YELLOW}⚠${RESET} $1"; }
+die() { echo -e " ${RED}✘ FATAL: $1${RESET}" >&2; exit 1; }
+info() { echo -e " ${CYAN}→${RESET} $1"; }
+prompt() { echo -e "\n ${BOLD}$1${RESET}"; }
+
+# ── Rollback instructions ─────────────────────────────────────────────────────
+print_rollback() {
+ echo ""
+ echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
+ echo -e "${YELLOW} ROLLBACK — to restore the original SQLite stack:${RESET}"
+ echo ""
+ echo " cd $WORK_DIR"
+ echo " docker compose -f $NEW_COMPOSE --env-file .env.production down 2>/dev/null || true"
+ echo " docker compose -f $OLD_COMPOSE --env-file .env up -d"
+ echo ""
+ echo " SQLite backup is at: $BACKUP_DIR/data_entry_app.db"
+ echo -e 
"${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}" + echo "" +} +trap print_rollback ERR + +# ============================================================================= +h1 "PHASE 0 — PRE-FLIGHT" +# ============================================================================= + +[[ $EUID -eq 0 ]] || die "Run this script as root." +[[ -t 0 ]] || die "This script must be run interactively (not piped via stdin)." + +cd "$WORK_DIR" || die "Cannot cd to $WORK_DIR" + +# Check required containers are running +for C in "$BACKEND" "$FRONTEND" "$NGINX"; do + STATUS=$(docker inspect --format='{{.State.Status}}' "$C" 2>/dev/null || echo missing) + [[ "$STATUS" == "running" ]] || die "Container $C is not running (status: $STATUS). Cannot migrate." + ok "$C is running" +done + +# Confirm SQLite DB is reachable inside backend container +SQLITE_EXISTS=$(docker exec "$BACKEND" python -c \ + "import os; print('yes' if os.path.exists('/data/data_entry_app.db') else 'no')" 2>/dev/null || echo no) +[[ "$SQLITE_EXISTS" == "yes" ]] || die "SQLite DB not found at /data/data_entry_app.db inside $BACKEND" +ok "SQLite DB reachable inside backend container" + +# Check production compose file +[[ -f "$WORK_DIR/$NEW_COMPOSE" ]] && ok "$NEW_COMPOSE already present" || warn "$NEW_COMPOSE not present — will write it" + +echo "" +echo -e " ${BOLD}Everything looks good. Starting migration wizard.${RESET}" +echo "" + +# ============================================================================= +h1 "PHASE 1 — GATHER CONFIGURATION" +# ============================================================================= + +info "Reading current config from running backend container..." 
+ +get_env() { docker exec "$BACKEND" printenv "$1" 2>/dev/null || echo ""; } + +APP_NAME=$(get_env APP_NAME) +CLIENT_NAME=$(get_env CLIENT_NAME) +CLIENT_EMAIL=$(get_env CLIENT_EMAIL) +CLIENT_TENANT_ID=$(get_env CLIENT_TENANT_ID) +ADMIN_NAME=$(get_env ADMIN_NAME) +ADMIN_EMAIL=$(get_env ADMIN_EMAIL) +CORS_ALLOW_ORIGINS=$(get_env CORS_ALLOW_ORIGINS) +ORIGIN=$(docker exec "$FRONTEND" printenv ORIGIN 2>/dev/null || echo "https://clients.lean-101.com.au") +PUBLIC_API_BASE_URL=$(docker exec "$FRONTEND" printenv PUBLIC_API_BASE_URL 2>/dev/null || echo "https://clients.lean-101.com.au") +PUBLIC_MIX_CALC_HISTORY=$(docker exec "$FRONTEND" printenv PUBLIC_MIX_CALCULATOR_SESSION_HISTORY 2>/dev/null || echo "false") +PUBLIC_MIX_CALC_SAVE=$(docker exec "$FRONTEND" printenv PUBLIC_MIX_CALCULATOR_SESSION_SAVE 2>/dev/null || echo "false") +CLIENTS_APP_PORT=$(docker inspect "$NGINX" --format='{{range $p, $conf := .NetworkSettings.Ports}}{{if $conf}}{{(index $conf 0).HostPort}}{{end}}{{end}}' 2>/dev/null || echo "8092") + +# Auth secret +EXISTING_AUTH_SECRET=$(get_env AUTH_SECRET) + +echo "" +echo " Current values extracted from container:" +echo " APP_NAME = $APP_NAME" +echo " CLIENT_NAME = $CLIENT_NAME" +echo " CLIENT_EMAIL = $CLIENT_EMAIL" +echo " CLIENT_TENANT_ID = $CLIENT_TENANT_ID" +echo " ADMIN_NAME = $ADMIN_NAME" +echo " ADMIN_EMAIL = $ADMIN_EMAIL" +echo " CORS_ALLOW_ORIGINS = $CORS_ALLOW_ORIGINS" +echo " CLIENTS_APP_PORT = $CLIENTS_APP_PORT" + +# Prompt for secrets +prompt "Enter PostgreSQL password for user 'lean101' (new — you choose this):" +read -r -s POSTGRES_PASSWORD +[[ -n "$POSTGRES_PASSWORD" ]] || die "Postgres password cannot be empty." +echo "" + +prompt "Enter CLIENT_PASSWORD (current app client password — press Enter to reuse existing):" +EXISTING_CLIENT_PW=$(get_env CLIENT_PASSWORD) +read -r -s CLIENT_PASSWORD_INPUT +echo "" +CLIENT_PASSWORD="${CLIENT_PASSWORD_INPUT:-$EXISTING_CLIENT_PW}" +[[ -n "$CLIENT_PASSWORD" ]] || die "Client password cannot be empty." 
+ +prompt "Enter ADMIN_PASSWORD (current app admin password — press Enter to reuse existing):" +EXISTING_ADMIN_PW=$(get_env ADMIN_PASSWORD) +read -r -s ADMIN_PASSWORD_INPUT +echo "" +ADMIN_PASSWORD="${ADMIN_PASSWORD_INPUT:-$EXISTING_ADMIN_PW}" +[[ -n "$ADMIN_PASSWORD" ]] || die "Admin password cannot be empty." + +prompt "Enter AUTH_SECRET (press Enter to reuse existing: ${EXISTING_AUTH_SECRET:0:8}...):" +read -r -s AUTH_SECRET_INPUT +echo "" +AUTH_SECRET="${AUTH_SECRET_INPUT:-$EXISTING_AUTH_SECRET}" +[[ -n "$AUTH_SECRET" ]] || die "Auth secret cannot be empty." + +echo "" +ok "All credentials collected." + +# ============================================================================= +h1 "PHASE 2 — BACKUP" +# ============================================================================= + +info "Creating backup at $BACKUP_DIR ..." +mkdir -p "$BACKUP_DIR" + +# Back up SQLite DB from inside the container +info "Copying SQLite DB from container..." +docker cp "$BACKEND":/data/data_entry_app.db "$BACKUP_DIR/data_entry_app.db" +SQLITE_SIZE=$(du -sh "$BACKUP_DIR/data_entry_app.db" | cut -f1) +ok "SQLite DB backed up ($SQLITE_SIZE) → $BACKUP_DIR/data_entry_app.db" + +# Back up env and compose files +[[ -f .env ]] && cp .env "$BACKUP_DIR/.env.original" && ok ".env backed up" +[[ -f .env.alpha ]] && cp .env.alpha "$BACKUP_DIR/.env.alpha.original" +cp "$OLD_COMPOSE" "$BACKUP_DIR/$OLD_COMPOSE.original" && ok "$OLD_COMPOSE backed up" + +# Record current container state +docker ps -a > "$BACKUP_DIR/containers_before.txt" +docker volume ls > "$BACKUP_DIR/volumes_before.txt" +ok "Container/volume state recorded" + +# SQLite row counts for comparison later +info "Recording SQLite row counts..." 
+docker exec "$BACKEND" python -c " +import sqlite3 +conn = sqlite3.connect('/data/data_entry_app.db') +tables = conn.execute(\"SELECT name FROM sqlite_master WHERE type='table' ORDER BY name\").fetchall() +print('SQLite row counts:') +for (t,) in tables: + try: + count = conn.execute(f'SELECT COUNT(*) FROM {t}').fetchone()[0] + print(f' {t}: {count}') + except Exception as e: + print(f' {t}: ERROR ({e})') +conn.close() +" 2>/dev/null | tee "$BACKUP_DIR/sqlite_row_counts.txt" + +echo "" +ok "Backup complete at $BACKUP_DIR" + +# ============================================================================= +h1 "PHASE 3 — WRITE PRODUCTION CONFIG" +# ============================================================================= + +DATABASE_URL="postgresql+psycopg://lean101:${POSTGRES_PASSWORD}@db:5432/lean101" + +# Write .env.production +info "Writing .env.production..." +cat > "$WORK_DIR/.env.production" < "$WORK_DIR/$NEW_COMPOSE" <<'COMPOSEEOF' +services: + db: + container_name: lean101-clients-db + image: postgres:16-alpine + restart: unless-stopped + environment: + POSTGRES_USER: ${POSTGRES_USER:-lean101} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required} + POSTGRES_DB: ${POSTGRES_DB:-lean101} + volumes: + - clients_db_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-lean101} -d ${POSTGRES_DB:-lean101}"] + interval: 10s + timeout: 5s + retries: 10 + start_period: 15s + + backend: + container_name: lean101-clients-backend + build: + context: . 
+ dockerfile: backend/Dockerfile + restart: unless-stopped + environment: + APP_NAME: ${APP_NAME:-Lean 101 Clients API} + DATABASE_URL: ${DATABASE_URL:-postgresql+psycopg://${POSTGRES_USER:-lean101}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB:-lean101}} + CLIENT_NAME: ${CLIENT_NAME:-Hunter Premium Produce} + CLIENT_EMAIL: ${CLIENT_EMAIL:-operator@example.com} + CLIENT_PASSWORD: ${CLIENT_PASSWORD:?CLIENT_PASSWORD is required} + CLIENT_TENANT_ID: ${CLIENT_TENANT_ID:-hunter-premium-produce} + ADMIN_NAME: ${ADMIN_NAME:-Lean 101} + ADMIN_EMAIL: ${ADMIN_EMAIL:-admin@lean101.local} + ADMIN_PASSWORD: ${ADMIN_PASSWORD:?ADMIN_PASSWORD is required} + AUTH_SECRET: ${AUTH_SECRET:?AUTH_SECRET is required} + CORS_ALLOW_ORIGINS: ${CORS_ALLOW_ORIGINS:-https://clients.lean-101.com.au} + depends_on: + db: + condition: service_healthy + healthcheck: + test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8000/health')"] + interval: 30s + timeout: 5s + retries: 5 + start_period: 25s + + frontend: + container_name: lean101-clients-frontend + build: + context: . 
+ dockerfile: frontend/Dockerfile + restart: unless-stopped + environment: + ORIGIN: ${ORIGIN:-https://clients.lean-101.com.au} + PORT: 3000 + HOST: 0.0.0.0 + PUBLIC_API_BASE_URL: ${PUBLIC_API_BASE_URL:-https://clients.lean-101.com.au} + INTERNAL_API_BASE_URL: ${INTERNAL_API_BASE_URL:-http://backend:8000} + PUBLIC_API_PORT: ${PUBLIC_API_PORT:-8000} + PUBLIC_MIX_CALCULATOR_SESSION_HISTORY: ${PUBLIC_MIX_CALCULATOR_SESSION_HISTORY:-false} + PUBLIC_MIX_CALCULATOR_SESSION_SAVE: ${PUBLIC_MIX_CALCULATOR_SESSION_SAVE:-false} + depends_on: + backend: + condition: service_healthy + + nginx: + container_name: lean101-clients + image: nginx:1.27-alpine + restart: unless-stopped + depends_on: + frontend: + condition: service_started + backend: + condition: service_healthy + ports: + - "${CLIENTS_APP_PORT:-8092}:80" + volumes: + - ./deploy/nginx/clients.lean-101.conf:/etc/nginx/conf.d/default.conf:ro + +volumes: + clients_db_data: +COMPOSEEOF + ok "$NEW_COMPOSE written" +else + ok "$NEW_COMPOSE already exists — not overwritten" +fi + +# ============================================================================= +h1 "PHASE 4 — START POSTGRES" +# ============================================================================= + +info "Starting database service from $NEW_COMPOSE..." +docker compose --env-file .env.production -f "$NEW_COMPOSE" up -d db + +info "Waiting for Postgres to be healthy (up to 60s)..." +for i in $(seq 1 30); do + HEALTH=$(docker inspect --format='{{if .State.Health}}{{.State.Health.Status}}{{else}}starting{{end}}' "$PG_DB_SERVICE" 2>/dev/null || echo missing) + if [[ "$HEALTH" == "healthy" ]]; then + ok "Postgres is healthy" + break + fi + printf " attempt %d/30: %s\n" "$i" "$HEALTH" + sleep 2 + if [[ $i -eq 30 ]]; then + docker logs "$PG_DB_SERVICE" --tail=20 + die "Postgres did not become healthy in time." 
+ fi +done + +# ============================================================================= +h1 "PHASE 5 — BOOTSTRAP POSTGRES SCHEMA" +# ============================================================================= + +info "Running bootstrap_schema on Postgres via backend container..." + +docker exec \ + -e DATABASE_URL="$DATABASE_URL" \ + "$BACKEND" \ + python -c " +import os +from sqlalchemy import create_engine +from app.db.migrations import bootstrap_schema +from app.db.session import Base +import app.models # registers all models onto Base.metadata + +url = os.environ['DATABASE_URL'] +print(f' Connecting to: {url.split(\"@\")[1] if \"@\" in url else url}') +pg_engine = create_engine(url) +result = bootstrap_schema(pg_engine, Base.metadata) +print(f' Result: {result.summary()}') +" + +ok "Postgres schema bootstrapped" + +# ============================================================================= +h1 "PHASE 6 — MIGRATE DATA (SQLite → PostgreSQL)" +# ============================================================================= + +info "Writing Python migration script..." + +cat > "$MIGRATE_SCRIPT" <<'PYEOF' +#!/usr/bin/env python3 +""" +Migrate all rows from SQLite (/data/data_entry_app.db) to PostgreSQL. +Runs inside the lean101-clients-backend container. 
+""" +import os +import sys +from sqlalchemy import create_engine, text, inspect + +SQLITE_URL = "sqlite:////data/data_entry_app.db" +PG_URL = os.environ["PG_DATABASE_URL"] + +# FK-safe insertion order based on model relationships +TABLE_ORDER = [ + "roles", + "permissions", + "role_permissions", + "users", + "client_accounts", + "client_users", + "client_feature_access", + "client_user_module_permissions", + "client_access_audit_events", + "raw_materials", + "raw_material_price_versions", + "mixes", + "mix_ingredients", + "process_cost_rules", + "packaging_cost_rules", + "freight_cost_rules", + "products", + "scenarios", + "costing_results", + "mix_calculator_sessions", + "mix_calculator_session_lines", +] + +def migrate(): + from app.db.session import Base + import app.models # registers all models onto Base.metadata + + src = create_engine(SQLITE_URL, connect_args={"check_same_thread": False}) + dst = create_engine(PG_URL) + + sqlite_tables = set(inspect(src).get_table_names()) + print(f"\nFound {len(sqlite_tables)} tables in SQLite: {', '.join(sorted(sqlite_tables))}\n") + + totals = {} + + with dst.begin() as dst_conn: + # Disable FK checks for bulk insert + dst_conn.execute(text("SET session_replication_role = 'replica'")) + + for table_name in TABLE_ORDER: + if table_name not in sqlite_tables: + print(f" SKIP {table_name:<45} (not in SQLite)") + continue + + table = Base.metadata.tables.get(table_name) + if table is None: + print(f" SKIP {table_name:<45} (not in SQLAlchemy metadata)") + continue + + try: + with src.connect() as src_conn: + rows = src_conn.execute(table.select()).fetchall() + except Exception as e: + print(f" ERROR {table_name:<45} SQLite read failed: {e}") + continue + + if not rows: + print(f" SKIP {table_name:<45} (0 rows)") + continue + + try: + dst_conn.execute(table.insert(), [dict(row._mapping) for row in rows]) + print(f" OK {table_name:<45} {len(rows):>6} rows") + totals[table_name] = len(rows) + except Exception as e: + print(f" 
ERROR {table_name:<45} Insert failed: {e}") + sys.exit(1) + + # Re-enable FK checks + dst_conn.execute(text("SET session_replication_role = 'origin'")) + + # Reset auto-increment sequences + print("\n Resetting sequences...") + with dst.begin() as conn: + for table_name in TABLE_ORDER: + try: + conn.execute(text( + f"SELECT setval(" + f" pg_get_serial_sequence('{table_name}', 'id')," + f" COALESCE((SELECT MAX(id) FROM {table_name}), 1)" + f")" + )) + except Exception: + pass + + print(f"\n Migration complete. {sum(totals.values())} rows across {len(totals)} tables.") + return totals + +if __name__ == "__main__": + migrate() +PYEOF + +info "Copying migration script into backend container..." +docker cp "$MIGRATE_SCRIPT" "$BACKEND:$MIGRATE_SCRIPT" + +info "Running migration..." +docker exec \ + -e DATABASE_URL="$DATABASE_URL" \ + -e PG_DATABASE_URL="$DATABASE_URL" \ + "$BACKEND" \ + python "$MIGRATE_SCRIPT" + +ok "Data migration complete" + +# ============================================================================= +h1 "PHASE 7 — VERIFY MIGRATION" +# ============================================================================= + +info "Postgres row counts:" +docker exec "$PG_DB_SERVICE" psql -U lean101 -d lean101 -c " +SELECT + relname AS table_name, + n_live_tup AS row_count +FROM pg_stat_user_tables +ORDER BY n_live_tup DESC; +" | sed 's/^/ /' + +echo "" +echo " SQLite row counts (from backup):" +cat "$BACKUP_DIR/sqlite_row_counts.txt" | sed 's/^/ /' + +echo "" +prompt "Review the counts above. Do they match? Type 'yes' to proceed with cutover, anything else to abort:" +read -r CONFIRM +if [[ "$CONFIRM" != "yes" ]]; then + echo "" + warn "Cutover aborted by user. Old SQLite stack is still running." + warn "Postgres is running but old stack is untouched." 
+ echo "" + echo " To retry from data migration step:" + echo " bash $0" + echo "" + echo " To tear down the Postgres container:" + echo " docker compose -f $NEW_COMPOSE --env-file .env.production down" + exit 0 +fi + +# ============================================================================= +h1 "PHASE 8 — CUTOVER (Stop SQLite stack, Start Postgres stack)" +# ============================================================================= + +info "Stopping old SQLite stack (backend, frontend, nginx)..." +docker stop "$BACKEND" "$FRONTEND" "$NGINX" 2>/dev/null || true +docker rm "$BACKEND" "$FRONTEND" "$NGINX" 2>/dev/null || true +ok "Old containers stopped and removed" + +info "Starting full production stack from $NEW_COMPOSE..." +docker compose --env-file .env.production -f "$NEW_COMPOSE" up -d --build + +info "Waiting for production backend to become healthy (up to 90s)..." +for i in $(seq 1 30); do + HEALTH=$(docker inspect --format='{{if .State.Health}}{{.State.Health.Status}}{{else}}starting{{end}}' "$BACKEND" 2>/dev/null || echo missing) + if [[ "$HEALTH" == "healthy" ]]; then + ok "Backend healthy" + break + fi + printf " attempt %d/30: %s\n" "$i" "$HEALTH" + sleep 3 + if [[ $i -eq 30 ]]; then + docker logs "$BACKEND" --tail=30 + die "Backend did not become healthy. Check logs above. Run rollback if needed." 
+ fi +done + +# ============================================================================= +h1 "PHASE 9 — FINAL VERIFICATION" +# ============================================================================= + +info "Stack status:" +docker compose --env-file .env.production -f "$NEW_COMPOSE" ps | sed 's/^/ /' + +echo "" +info "Backend health endpoint:" +docker exec "$BACKEND" python -c \ + "import urllib.request; r=urllib.request.urlopen('http://127.0.0.1:8000/health'); print(' ', r.read().decode())" + +echo "" +info "DATABASE_URL in production backend:" +docker exec "$BACKEND" printenv DATABASE_URL | \ + sed 's|://[^:]*:[^@]*@|://***:***@|g' | sed 's/^/ /' + +echo "" +info "Final Postgres table row counts:" +docker exec "$PG_DB_SERVICE" psql -U lean101 -d lean101 -c " +SELECT relname AS table_name, n_live_tup AS rows +FROM pg_stat_user_tables +ORDER BY n_live_tup DESC; +" | sed 's/^/ /' + +# Clean up migration script from container +docker exec "$BACKEND" rm -f "$MIGRATE_SCRIPT" 2>/dev/null || true +rm -f "$MIGRATE_SCRIPT" + +# Remove the error trap since we succeeded +trap - ERR + +sep +echo -e "${GREEN}${BOLD} Migration complete!${RESET}" +echo "" +echo -e " ${BOLD}Production is now running on PostgreSQL.${RESET}" +echo "" +echo " Backup preserved at: $BACKUP_DIR" +echo " SQLite volume (lean101-clients_clients_app_data) was NOT deleted." +echo " Once you're confident in production, you can remove it with:" +echo " docker volume rm lean101-clients_clients_app_data" +echo "" +echo " Next steps:" +echo " 1. Verify the app at https://clients.lean-101.com.au" +echo " 2. Bootstrap git repo: ./deploy/Deploy.ps1 -RemoteHost -Bootstrap -RepoUrl " +echo " 3. 
Future deploys use: ./deploy/Deploy.ps1 -RemoteHost <droplet-ip>"
+sep
+echo ""
diff --git a/deploy/predeployment-check.sh b/deploy/predeployment-check.sh
new file mode 100644
index 0000000..6883fdb
--- /dev/null
+++ b/deploy/predeployment-check.sh
@@ -0,0 +1,306 @@
+#!/usr/bin/env bash
+# =============================================================================
+# predeployment-check.sh
+#
+# Run on the production server to capture full environment state before making
+# changes. Safe — read-only, no writes, no restarts.
+#
+# Usage (from local machine):
+#   ssh root@<droplet-ip> 'bash -s' < deploy/predeployment-check.sh
+#
+# Usage (on the server directly):
+#   bash /srv/lean101-clients/deploy/predeployment-check.sh
+# =============================================================================
+
+set -euo pipefail
+
+REMOTE_PATH="${REMOTE_PATH:-/srv/lean101-clients}"
+COMPOSE_FILE="${COMPOSE_FILE:-docker-compose.production.yml}"
+ENV_FILE="${ENV_FILE:-.env.production}"
+BACKEND_CONTAINER="lean101-clients-backend"
+FRONTEND_CONTAINER="lean101-clients-frontend"
+NGINX_CONTAINER="lean101-clients"
+DB_CONTAINER="lean101-clients-db"
+
+# Colours
+RED='\033[0;31m'; YELLOW='\033[1;33m'; GREEN='\033[0;32m'
+CYAN='\033[0;36m'; BOLD='\033[1m'; RESET='\033[0m'
+
+sep() { echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"; }
+h1() { sep; echo -e "${BOLD}${CYAN} $1${RESET}"; sep; }
+ok() { echo -e " ${GREEN}✔${RESET} $1"; }
+warn() { echo -e " ${YELLOW}⚠${RESET} $1"; }
+fail() { echo -e " ${RED}✘${RESET} $1"; }
+kv() { printf " %-30s %s\n" "$1" "$2"; }
+
+# Helper: run a command inside a container, return empty string on failure
+cexec() {
+ local container="$1"; shift
+ docker exec "$container" "$@" 2>/dev/null || true
+}
+
+echo ""
+echo -e "${BOLD} LEAN 101 CLIENTS — Pre-Deployment Check${RESET}"
+echo -e " Generated: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
+echo -e " Host: $(hostname -f 2>/dev/null || hostname)"
+echo ""
+
+# 
============================================================================= +h1 "1. HOST SYSTEM" +# ============================================================================= +kv "OS:" "$(. /etc/os-release 2>/dev/null && echo "$PRETTY_NAME" || uname -s)" +kv "Kernel:" "$(uname -r)" +kv "Uptime:" "$(uptime -p 2>/dev/null || uptime)" + +echo "" +echo " Disk usage (df -h):" +df -h --output=source,size,used,avail,pcent,target 2>/dev/null | grep -v tmpfs | grep -v udev | sed 's/^/ /' + +echo "" +echo " Memory (free -h):" +free -h 2>/dev/null | sed 's/^/ /' || vm_stat 2>/dev/null | sed 's/^/ /' + +# ============================================================================= +h1 "2. DOCKER & COMPOSE" +# ============================================================================= +if command -v docker &>/dev/null; then + ok "Docker installed" + kv "Docker version:" "$(docker --version)" + kv "Compose version:" "$(docker compose version 2>/dev/null || docker-compose --version 2>/dev/null || echo 'not found')" +else + fail "Docker not found on PATH" +fi + +# ============================================================================= +h1 "3. 
REPOSITORY STATE" +# ============================================================================= +if [ -d "$REMOTE_PATH/.git" ]; then + cd "$REMOTE_PATH" + ok "Repo found at $REMOTE_PATH" + kv "Branch:" "$(git rev-parse --abbrev-ref HEAD 2>/dev/null)" + kv "Latest commit:" "$(git log -1 --format='%h %s (%cr)' 2>/dev/null)" + kv "Commit author:" "$(git log -1 --format='%an <%ae>' 2>/dev/null)" + kv "Remote origin:" "$(git remote get-url origin 2>/dev/null)" + + DIRTY=$(git status --porcelain 2>/dev/null) + if [ -n "$DIRTY" ]; then + warn "Working tree has uncommitted changes:" + git status --short | sed 's/^/ /' + else + ok "Working tree clean" + fi + + echo "" + echo " Recent commits (last 5):" + git log -5 --format=' %h %s (%cr)' 2>/dev/null +else + fail "No git repo found at $REMOTE_PATH" +fi + +# ============================================================================= +h1 "4. ENVIRONMENT FILE" +# ============================================================================= +ENV_PATH="$REMOTE_PATH/$ENV_FILE" +if [ -f "$ENV_PATH" ]; then + ok "Env file: $ENV_PATH" + kv "Modified:" "$(stat -c '%y' "$ENV_PATH" 2>/dev/null | cut -d'.' -f1 || stat -f '%Sm' "$ENV_PATH" 2>/dev/null)" + kv "Permissions:" "$(stat -c '%a %U:%G' "$ENV_PATH" 2>/dev/null || stat -f '%Sp %Su:%Sg' "$ENV_PATH" 2>/dev/null)" + echo "" + echo " Env keys present (values redacted):" + grep -v '^#' "$ENV_PATH" | grep '=' | cut -d'=' -f1 | sort | sed 's/^/ /' +else + fail "Env file NOT found at $ENV_PATH" +fi + +# ============================================================================= +h1 "5. 
DOCKER STACK — CONTAINER STATUS" +# ============================================================================= +cd "$REMOTE_PATH" 2>/dev/null || true + +echo " All containers on this host:" +docker ps -a --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}\t{{.Ports}}' | sed 's/^/ /' + +echo "" +echo " Compose stack status ($COMPOSE_FILE):" +docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" ps 2>/dev/null | sed 's/^/ /' || \ + warn "Could not run docker compose ps (compose file or env file missing?)" + +# Per-container inspection +for CNAME in "$BACKEND_CONTAINER" "$FRONTEND_CONTAINER" "$NGINX_CONTAINER" "$DB_CONTAINER"; do + STATUS=$(docker inspect --format='{{.State.Status}}' "$CNAME" 2>/dev/null || echo "missing") + HEALTH=$(docker inspect --format='{{if .State.Health}}{{.State.Health.Status}}{{else}}n/a{{end}}' "$CNAME" 2>/dev/null || echo "missing") + IMAGE=$(docker inspect --format='{{.Config.Image}}' "$CNAME" 2>/dev/null || echo "unknown") + STARTED=$(docker inspect --format='{{.State.StartedAt}}' "$CNAME" 2>/dev/null | cut -d'.' -f1 || echo "unknown") + + echo "" + echo -e " ${BOLD}$CNAME${RESET}" + kv " Status:" "$STATUS" + kv " Health:" "$HEALTH" + kv " Image:" "$IMAGE" + kv " Started:" "$STARTED" + + if [ "$STATUS" = "running" ]; then + MEM=$(docker stats "$CNAME" --no-stream --format '{{.MemUsage}}' 2>/dev/null || echo "n/a") + CPU=$(docker stats "$CNAME" --no-stream --format '{{.CPUPerc}}' 2>/dev/null || echo "n/a") + kv " CPU / Mem:" "$CPU / $MEM" + fi +done + +# ============================================================================= +h1 "6. 
DOCKER VOLUMES" +# ============================================================================= +echo " Named volumes:" +docker volume ls --format 'table {{.Name}}\t{{.Driver}}\t{{.Mountpoint}}' | grep -i lean | sed 's/^/ /' || echo " (none matching lean)" + +echo "" +echo " All volumes:" +docker volume ls --format ' {{.Name}}' | head -30 + +# Postgres data volume size +PG_MOUNT=$(docker volume inspect lean101-clients_clients_db_data --format '{{.Mountpoint}}' 2>/dev/null || \ + docker volume inspect clients_db_data --format '{{.Mountpoint}}' 2>/dev/null || true) +if [ -n "$PG_MOUNT" ]; then + PG_SIZE=$(du -sh "$PG_MOUNT" 2>/dev/null | cut -f1 || echo "n/a") + kv " Postgres volume size:" "$PG_SIZE ($PG_MOUNT)" +fi + +# ============================================================================= +h1 "7. POSTGRESQL — DATABASE STATE" +# ============================================================================= +DB_STATUS=$(docker inspect --format='{{.State.Status}}' "$DB_CONTAINER" 2>/dev/null || echo "missing") + +if [ "$DB_STATUS" = "running" ]; then + ok "Database container is running" + + PG_USER=$(docker exec "$DB_CONTAINER" printenv POSTGRES_USER 2>/dev/null || echo "lean101") + PG_DB=$(docker exec "$DB_CONTAINER" printenv POSTGRES_DB 2>/dev/null || echo "lean101") + + echo "" + echo " PostgreSQL version:" + cexec "$DB_CONTAINER" psql -U "$PG_USER" -d "$PG_DB" -c "SELECT version();" | sed 's/^/ /' + + echo "" + echo " Database size:" + cexec "$DB_CONTAINER" psql -U "$PG_USER" -d "$PG_DB" \ + -c "SELECT pg_database.datname, pg_size_pretty(pg_database_size(pg_database.datname)) AS size FROM pg_database ORDER BY pg_database_size(pg_database.datname) DESC;" \ + | sed 's/^/ /' + + echo "" + echo " Tables and row counts:" + cexec "$DB_CONTAINER" psql -U "$PG_USER" -d "$PG_DB" -c " + SELECT + schemaname, + relname AS table_name, + n_live_tup AS row_count, + pg_size_pretty(pg_total_relation_size(quote_ident(schemaname)||'.'||quote_ident(relname))) AS total_size + 
FROM pg_stat_user_tables + ORDER BY n_live_tup DESC; + " | sed 's/^/ /' + + echo "" + echo " Alembic migration state:" + cexec "$DB_CONTAINER" psql -U "$PG_USER" -d "$PG_DB" -c \ + "SELECT version_num, is_current FROM alembic_version LEFT JOIN (SELECT true AS is_current) t ON true;" \ + 2>/dev/null | sed 's/^/ /' || \ + cexec "$DB_CONTAINER" psql -U "$PG_USER" -d "$PG_DB" -c \ + "SELECT version_num FROM alembic_version;" 2>/dev/null | sed 's/^/ /' || \ + warn "alembic_version table not found or inaccessible" + + echo "" + echo " Active connections:" + cexec "$DB_CONTAINER" psql -U "$PG_USER" -d "$PG_DB" -c " + SELECT count(*) AS total_connections, + sum(CASE WHEN state = 'active' THEN 1 ELSE 0 END) AS active + FROM pg_stat_activity + WHERE datname = '$PG_DB'; + " | sed 's/^/ /' +else + fail "Database container status: $DB_STATUS — skipping DB checks" +fi + +# ============================================================================= +h1 "8. BACKEND — HEALTH & API" +# ============================================================================= +if [ "$(docker inspect --format='{{.State.Status}}' "$BACKEND_CONTAINER" 2>/dev/null)" = "running" ]; then + echo " Health endpoint (internal):" + HEALTH_RESP=$(cexec "$BACKEND_CONTAINER" python -c \ + "import urllib.request, json; r=urllib.request.urlopen('http://127.0.0.1:8000/health'); print(r.read().decode())" 2>/dev/null || echo "failed") + echo " $HEALTH_RESP" + + echo "" + echo " Python / package versions inside backend container:" + cexec "$BACKEND_CONTAINER" python --version 2>&1 | sed 's/^/ /' + cexec "$BACKEND_CONTAINER" pip show fastapi sqlalchemy alembic psycopg 2>/dev/null | \ + grep -E '^(Name|Version):' | sed 's/^/ /' + + echo "" + echo " DATABASE_URL in backend (secret masked):" + cexec "$BACKEND_CONTAINER" printenv DATABASE_URL | \ + sed 's|://[^:]*:[^@]*@|://***:***@|g' | sed 's/^/ /' +else + fail "Backend container not running — skipping API checks" +fi + +# 
============================================================================= +h1 "9. NGINX — CONFIGURATION" +# ============================================================================= +if [ "$(docker inspect --format='{{.State.Status}}' "$NGINX_CONTAINER" 2>/dev/null)" = "running" ]; then + ok "Nginx container is running" + echo "" + echo " nginx -t output:" + cexec "$NGINX_CONTAINER" nginx -t 2>&1 | sed 's/^/ /' + echo "" + echo " Active listening ports (nginx container):" + docker exec "$NGINX_CONTAINER" sh -c 'cat /etc/nginx/conf.d/default.conf 2>/dev/null | grep -E "listen|server_name|proxy_pass"' | sed 's/^/ /' || true + echo "" + echo " Host port binding:" + docker port "$NGINX_CONTAINER" | sed 's/^/ /' +else + warn "Nginx container not running" +fi + +# ============================================================================= +h1 "10. RECENT CONTAINER LOGS (last 30 lines each)" +# ============================================================================= +for CNAME in "$BACKEND_CONTAINER" "$FRONTEND_CONTAINER" "$NGINX_CONTAINER" "$DB_CONTAINER"; do + STATUS=$(docker inspect --format='{{.State.Status}}' "$CNAME" 2>/dev/null || echo "missing") + echo "" + echo -e " ${BOLD}$CNAME${RESET} [$STATUS]" + if [ "$STATUS" != "missing" ]; then + docker logs "$CNAME" --tail=30 2>&1 | sed 's/^/ /' + fi +done + +# ============================================================================= +h1 "11. HOST NETWORK & FIREWALL" +# ============================================================================= +echo " Listening ports on host (ss -tlnp):" +ss -tlnp 2>/dev/null | sed 's/^/ /' || \ +netstat -tlnp 2>/dev/null | sed 's/^/ /' || \ + warn "Neither ss nor netstat available" + +echo "" +echo " UFW status:" +ufw status 2>/dev/null | sed 's/^/ /' || warn "UFW not available" + +echo "" +echo " Docker network list:" +docker network ls | sed 's/^/ /' + +# ============================================================================= +h1 "12. 
COMPOSE FILE DIFF (local vs what would deploy)" +# ============================================================================= +echo " Compose file on server ($COMPOSE_FILE):" +if [ -f "$REMOTE_PATH/$COMPOSE_FILE" ]; then + kv " Modified:" "$(stat -c '%y' "$REMOTE_PATH/$COMPOSE_FILE" 2>/dev/null | cut -d'.' -f1)" + kv " Size:" "$(wc -l < "$REMOTE_PATH/$COMPOSE_FILE") lines" + ok "File exists" +else + fail "$COMPOSE_FILE not found at $REMOTE_PATH" +fi + +# ============================================================================= +sep +echo -e "${BOLD} Check complete — paste this output into Claude for analysis.${RESET}" +sep +echo "" diff --git a/frontend/package.json b/frontend/package.json index e3d934c..91989cf 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,12 +1,12 @@ { "name": "data-entry-app-frontend", - "version": "0.2.0", + "version": "1.5.6", "private": true, "type": "module", "scripts": { - "dev": "vite dev", - "build": "vite build", - "preview": "vite preview", + "dev": "node -r ./scripts/vite-windows-eperm-workaround.cjs ./node_modules/vite/bin/vite.js dev", + "build": "node -r ./scripts/vite-windows-eperm-workaround.cjs ./node_modules/vite/bin/vite.js build", + "preview": "node -r ./scripts/vite-windows-eperm-workaround.cjs ./node_modules/vite/bin/vite.js preview", "test": "vitest run" }, "devDependencies": { diff --git a/frontend/scripts/vite-windows-eperm-workaround.cjs b/frontend/scripts/vite-windows-eperm-workaround.cjs new file mode 100644 index 0000000..cf867c6 --- /dev/null +++ b/frontend/scripts/vite-windows-eperm-workaround.cjs @@ -0,0 +1,46 @@ +const childProcess = require('node:child_process'); + +const originalExec = childProcess.exec; + +childProcess.exec = function patchedExec(command, options, callback) { + const normalizedCommand = typeof command === 'string' ? 
command.trim().toLowerCase() : ''; + + try { + return originalExec.call(this, command, options, callback); + } catch (error) { + if (normalizedCommand === 'net use' && error && error.code === 'EPERM') { + const cb = + typeof options === 'function' + ? options + : typeof callback === 'function' + ? callback + : null; + + if (cb) { + process.nextTick(() => cb(error, '', '')); + } + + return { + pid: undefined, + killed: false, + kill() { + return false; + }, + on() { + return this; + }, + once() { + return this; + }, + emit() { + return false; + }, + removeListener() { + return this; + } + }; + } + + throw error; + } +}; diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index d8e3810..411cc53 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -224,6 +224,34 @@ async function request( } } +async function requestBlob( + path: string, + auth: AuthMode = 'none', + fetcher: ApiFetch = fetch +): Promise { + try { + const token = getToken(auth); + const response = await fetcher(buildApiUrl(path), { + headers: token ? { Authorization: `Bearer ${token}` } : undefined + }); + + if (!response.ok) { + let message = 'Request failed'; + try { + const body = (await response.json()) as { detail?: string }; + message = body.detail ?? 
message; + } catch { + message = response.statusText || message; + } + throw new Error(message); + } + + return await response.blob(); + } catch (error) { + throw normalizeRequestError(error); + } +} + export const api = { rawMaterials: (fetcher?: ApiFetch) => cachedFetchJson('/api/raw-materials', mockRawMaterials, 'client', fetcher), mixes: (fetcher?: ApiFetch) => cachedFetchJson('/api/mixes', mockMixes, 'client', fetcher), @@ -234,6 +262,8 @@ export const api = { cachedFetchJson('/api/mix-calculator', mockMixCalculatorSessions, 'client', fetcher), mixCalculatorSession: (sessionId: number, fetcher?: ApiFetch) => request(`/api/mix-calculator/${sessionId}`, { method: 'GET' }, 'client', fetcher), + mixCalculatorSessionPdf: (sessionId: number, fetcher?: ApiFetch) => + requestBlob(`/api/mix-calculator/${sessionId}/pdf`, 'client', fetcher), previewMixCalculatorSession: (payload: MixCalculatorCreateInput) => request('/api/mix-calculator/preview', { method: 'POST', diff --git a/frontend/src/lib/components/ClientShell.svelte b/frontend/src/lib/components/ClientShell.svelte index d5189ce..bbb183a 100644 --- a/frontend/src/lib/components/ClientShell.svelte +++ b/frontend/src/lib/components/ClientShell.svelte @@ -1,137 +1,38 @@ + + + + diff --git a/frontend/src/lib/components/MixCalculatorPrintSheet.svelte b/frontend/src/lib/components/MixCalculatorPrintSheet.svelte index 51bf247..774c18f 100644 --- a/frontend/src/lib/components/MixCalculatorPrintSheet.svelte +++ b/frontend/src/lib/components/MixCalculatorPrintSheet.svelte @@ -1,29 +1,25 @@ @@ -33,118 +29,21 @@ diff --git a/frontend/src/lib/components/MixCalculatorWorkspace.svelte b/frontend/src/lib/components/mix-calculator/MixCalculatorEditor.svelte similarity index 52% rename from frontend/src/lib/components/MixCalculatorWorkspace.svelte rename to frontend/src/lib/components/mix-calculator/MixCalculatorEditor.svelte index 0926151..0d62014 100644 --- a/frontend/src/lib/components/MixCalculatorWorkspace.svelte +++ 
b/frontend/src/lib/components/mix-calculator/MixCalculatorEditor.svelte @@ -1,6 +1,7 @@ + + + + diff --git a/frontend/src/lib/components/mix-calculator/MixCalculatorResultsPanel.svelte b/frontend/src/lib/components/mix-calculator/MixCalculatorResultsPanel.svelte new file mode 100644 index 0000000..afcd2f7 --- /dev/null +++ b/frontend/src/lib/components/mix-calculator/MixCalculatorResultsPanel.svelte @@ -0,0 +1,450 @@ + + +
+
+
+

Calculated Output

+

{preview ? 'Snapshot of the scaled raw material requirements.' : 'Run the calculation to preview the session output.'}

+
+ {#if sessionNumber} +
+ Session + {sessionNumber} +
+ {/if} +
+ + {#if preview} +
+
+ Total kg + {formatNumber(preview.total_kg, 2)} +

Scaled batch size

+
+
+ Total bags + {formatNumber(preview.total_bags, 2)} +

{preview.product_unit_of_measure}

+
+
+ Prepared by + {preview.prepared_by_name} +

{formatDate(preview.mix_date)}

+
+
+ + {#if preview.warnings.length} +
+ {#each preview.warnings as warning} +

{warning}

+ {/each} +
+ {/if} + +
+
+ Client + {preview.client_name} +
+
+ Product + {preview.product_name} +
+
+ Mix source + {preview.mix_name} +
+
+ Unit size + {formatNumber(preview.product_unit_size_kg, 2)}kg +
+
+ +
+ + + + + + + + + + + {#each preview.lines as line} + + + + + + + {/each} + +
Raw materialMix %Required kgUnit
+ {line.raw_material_name} + {formatNumber(line.mix_percentage, 2)}%{formatNumber(line.required_kg, 2)}kg{line.unit}
+
+ {:else} +
+
+
+
+
+
+
+ + No calculation yet + Choose a client, product, date, and batch size on the left, then click Calculate mix. +
+
+ {#each [1,2,3,4,5] as _} +
+
+
+
+
+
+ {/each} +
+
+ {/if} +
+ + diff --git a/frontend/src/lib/components/MixWorkspace.svelte b/frontend/src/lib/components/mixes/MixEditor.svelte similarity index 92% rename from frontend/src/lib/components/MixWorkspace.svelte rename to frontend/src/lib/components/mixes/MixEditor.svelte index 2446b93..6aa9a57 100644 --- a/frontend/src/lib/components/MixWorkspace.svelte +++ b/frontend/src/lib/components/mixes/MixEditor.svelte @@ -28,8 +28,6 @@ let mixVersion = $state(getInitialMix()?.version ?? 1); let mixNotes = $state(getInitialMix()?.notes ?? ''); let draftIngredients = $state([]); - let feedback = $state(''); - let errorMessage = $state(''); let isSaving = $state(false); function currency(value: number | null | undefined, digits = 2) { @@ -86,8 +84,6 @@ loadDraftFromMix(getInitialMix()); function resetDraft() { - feedback = ''; - errorMessage = ''; loadDraftFromMix(savedMix); } @@ -318,14 +314,6 @@ - {#if feedback} - - {/if} - - {#if errorMessage} - - {/if} -
Live Draft Kg @@ -567,7 +555,6 @@ .locked-card, .page-intro, - .feedback, .metric-card, .editor-card, .summary-card { @@ -579,7 +566,6 @@ .locked-card, .page-intro, - .feedback, .metric-row, .editor-grid { margin-bottom: 1.12rem; @@ -663,23 +649,6 @@ cursor: wait; } - .feedback { - padding: 0.86rem 0.94rem; - font-weight: 600; - } - - .feedback.success { - color: var(--green-deep); - border-color: #d8ecdf; - background: #f6fcf8; - } - - .feedback.error { - color: #a03737; - border-color: #f0d9d9; - background: #fff8f8; - } - .metric-row, .editor-grid, .meta-grid, @@ -841,76 +810,54 @@ .factor-list strong, .healthy-card strong { display: block; - margin-bottom: 0.22rem; - font-size: 0.94rem; + margin-bottom: 0.28rem; + font-size: 0.96rem; font-weight: 700; } - .warning-list article, - .healthy-card { - padding: 0.9rem 0.94rem; - border-radius: 0.92rem; - } - .warning-list article { - border: 1px solid #f1e2c2; - background: #fffaf2; - color: #8d5d21; - font-weight: 500; + padding: 0.84rem 0.9rem; + border: 1px solid #f0d8d8; + border-radius: 0.92rem; + background: #fff7f7; + color: #9a4747; + font-size: 0.86rem; + font-weight: 600; } .healthy-card { - border: 1px solid var(--line); - background: var(--panel-soft); + padding: 0.95rem 1rem; + border: 1px solid #d9ecdf; + border-radius: 0.96rem; + background: #f6fcf8; } - @media (max-width: 1240px) { + @media (max-width: 980px) { + .metric-row, .editor-grid { grid-template-columns: 1fr; } - - .sidebar-stack { - grid-template-columns: repeat(3, minmax(0, 1fr)); - } } - @media (max-width: 1180px) { - .metric-row { - grid-template-columns: 1fr; - } - - .meta-grid { - grid-template-columns: repeat(2, minmax(0, 1fr)); - } - } - - @media (max-width: 760px) { + @media (max-width: 720px) { .page-intro, .section-heading, .intro-actions, .editor-actions { flex-direction: column; - align-items: flex-start; - } - - .intro-actions, - .editor-actions, - .primary-button, - .secondary-button { - width: 100%; + align-items: stretch; } 
+ .meta-grid, .summary-grid { grid-template-columns: 1fr; } - .meta-grid, - .sidebar-stack { - grid-template-columns: 1fr; + .sheet-table { + min-width: 0; + border-spacing: 0; } - } - @media (max-width: 880px) { .sheet-table, .sheet-table thead, .sheet-table tbody, @@ -920,61 +867,44 @@ width: 100%; } - .sheet-table { - min-width: 0; - border-spacing: 0; - } - .sheet-table thead { display: none; } .sheet-table tbody { display: grid; - gap: 0.9rem; + gap: 0.75rem; } .sheet-table tbody tr { - padding: 0.35rem; border: 1px solid var(--line); border-radius: 1rem; background: var(--panel-soft); + overflow: hidden; } .sheet-table tbody td { - padding: 0.78rem 0.8rem; + border: none; + border-bottom: 1px solid var(--line); + border-left: none; + border-right: none; + border-radius: 0; white-space: normal; - border: none; - border-radius: 0; - background: transparent; } - .sheet-table tbody td:first-child, .sheet-table tbody td:last-child { - border: none; - border-radius: 0; - } - - .sheet-table tbody td + td { - border-top: 1px solid var(--line); + border-bottom: none; } .sheet-table tbody td::before { content: attr(data-label); display: block; - margin-bottom: 0.35rem; + margin-bottom: 0.24rem; color: var(--muted); font-size: 0.72rem; font-weight: 700; letter-spacing: 0.06em; text-transform: uppercase; } - - .sheet-table input, - .sheet-table select, - .icon-delete { - width: 100%; - min-width: 0; - } } diff --git a/frontend/src/lib/components/navigation/AppNavSection.svelte b/frontend/src/lib/components/navigation/AppNavSection.svelte new file mode 100644 index 0000000..0543374 --- /dev/null +++ b/frontend/src/lib/components/navigation/AppNavSection.svelte @@ -0,0 +1,129 @@ + + +{#if label} + +{/if} + + + + diff --git a/frontend/src/lib/components/navigation/AppSecondaryRail.svelte b/frontend/src/lib/components/navigation/AppSecondaryRail.svelte new file mode 100644 index 0000000..a381bde --- /dev/null +++ 
b/frontend/src/lib/components/navigation/AppSecondaryRail.svelte @@ -0,0 +1,200 @@ + + + + + diff --git a/frontend/src/lib/components/navigation/AppSecondaryRailLayout.svelte b/frontend/src/lib/components/navigation/AppSecondaryRailLayout.svelte new file mode 100644 index 0000000..c8653d2 --- /dev/null +++ b/frontend/src/lib/components/navigation/AppSecondaryRailLayout.svelte @@ -0,0 +1,81 @@ + + +
+ + +
+
+ {@render children()} +
+
+
+ + diff --git a/frontend/src/lib/components/navigation/ClientPrimaryRail.svelte b/frontend/src/lib/components/navigation/ClientPrimaryRail.svelte new file mode 100644 index 0000000..8650b59 --- /dev/null +++ b/frontend/src/lib/components/navigation/ClientPrimaryRail.svelte @@ -0,0 +1,251 @@ + + + + + diff --git a/frontend/src/lib/components/navigation/ClientTopbar.svelte b/frontend/src/lib/components/navigation/ClientTopbar.svelte new file mode 100644 index 0000000..61caea8 --- /dev/null +++ b/frontend/src/lib/components/navigation/ClientTopbar.svelte @@ -0,0 +1,395 @@ + + +
+
+
+ +

{title}

+
+
+ +
+ +
+ +
+ +
+
+ + diff --git a/frontend/src/lib/components/navigation/WorkspaceSearchTrigger.svelte b/frontend/src/lib/components/navigation/WorkspaceSearchTrigger.svelte new file mode 100644 index 0000000..eaccbe2 --- /dev/null +++ b/frontend/src/lib/components/navigation/WorkspaceSearchTrigger.svelte @@ -0,0 +1,85 @@ + + + + + diff --git a/frontend/src/lib/navigation/client-navigation.ts b/frontend/src/lib/navigation/client-navigation.ts new file mode 100644 index 0000000..76d10b7 --- /dev/null +++ b/frontend/src/lib/navigation/client-navigation.ts @@ -0,0 +1,204 @@ +import { + Boxes, + Calculator, + ClipboardList, + DollarSign, + FlaskConical, + LayoutDashboard, + ShieldCheck, + TrendingUp, + Wheat, + Workflow +} from 'lucide-svelte'; +import type { ComponentType } from 'svelte'; + +import { featureFlags } from '$lib/features'; + +export type SearchItem = { + href: string; + label: string; + description: string; + keywords: string; +}; + +export type NavItem = { + href: string; + label: string; + shortLabel: string; + icon: ComponentType; + moduleKey?: string; +}; + +export type FooterLink = { + href: string; + label: string; + shortLabel: string; + icon: ComponentType; +}; + +export type Crumb = { + label: string; + href?: string; +}; + +export const dashboardItem: NavItem = { + href: '/', + label: 'Dashboard', + shortLabel: 'DB', + icon: LayoutDashboard, + moduleKey: 'dashboard' +}; + +export const mixCalculatorItem: NavItem = { + href: featureFlags.mixCalculatorSessionHistory ? 
'/mix-calculator' : '/mix-calculator/new', + label: 'Mix Calculator', + shortLabel: 'MC', + icon: Calculator, + moduleKey: 'mix_calculator' +}; + +export const reportingItem: NavItem = { + href: '/reporting', + label: 'Reporting', + shortLabel: 'RP', + icon: TrendingUp, + moduleKey: 'products' +}; + +export const workingDocumentItems: NavItem[] = [ + { href: '/raw-materials', label: 'Raw Materials', shortLabel: 'RM', icon: Wheat, moduleKey: 'raw_materials' }, + { href: '/mixes', label: 'Mix Master', shortLabel: 'MM', icon: FlaskConical, moduleKey: 'mix_master' }, + { href: '/products', label: 'Products', shortLabel: 'PR', icon: Boxes, moduleKey: 'products' }, + { href: '/scenarios', label: 'Scenarios', shortLabel: 'SC', icon: Workflow, moduleKey: 'scenarios' } +]; + +export const accessControlItem: NavItem = { + href: '/client-access', + label: 'Client Access', + shortLabel: 'AC', + icon: ShieldCheck, + moduleKey: 'client_access' +}; + +export const clientNavigationItems: NavItem[] = [ + dashboardItem, + mixCalculatorItem, + ...workingDocumentItems, + accessControlItem +]; + +export const footerLinks: FooterLink[] = [ + { href: '/products', label: 'Delivered Pricing', shortLabel: 'DP', icon: DollarSign }, + { href: '/scenarios', label: 'Planning View', shortLabel: 'PV', icon: ClipboardList } +]; + +export const baseSearchItems: SearchItem[] = [ + { + href: '/', + label: 'Open Dashboard', + description: 'Jump to the Hunter Premium Produce workspace summary.', + keywords: 'hunter premium produce overview dashboard workspace home' + }, + { + href: '/raw-materials', + label: 'Open Raw Materials', + description: 'Review live input costs that feed the pricing model.', + keywords: 'raw materials pricing inputs costs supplier' + }, + { + href: '/mixes', + label: 'Open Mix Master', + description: 'Browse saved mixes and their costing outputs.', + keywords: 'mix master mixes recipes spreadsheet' + }, + { + href: '/mixes/new', + label: 'Create New Mix', + description: 'Start 
a new costing worksheet for Hunter Premium Produce.', + keywords: 'new mix create worksheet hunter premium produce formula' + }, + ...(featureFlags.mixCalculatorSessionHistory + ? [ + { + href: '/mix-calculator', + label: 'Open Mix Calculator', + description: 'Review saved production sessions and batch calculations.', + keywords: 'mix calculator production sessions batch bags client product' + } + ] + : []), + { + href: '/mix-calculator/new', + label: 'Create Mix Calculation', + description: 'Run a new client-specific mix calculation session.', + keywords: 'new mix calculator session client batch size product bags print' + }, + { + href: '/products', + label: 'Open Products', + description: 'Review delivered product pricing and margins.', + keywords: 'products pricing margins delivered outputs' + }, + { + href: '/reporting', + label: 'Open Reporting', + description: 'View raw material costs, mix summaries, product pricing, and data quality reports.', + keywords: 'reporting reports raw materials mix cost product pricing data quality price review' + }, + { + href: '/settings', + label: 'Open Workspace Settings', + description: 'Review account details and workspace preferences.', + keywords: 'settings account preferences profile workspace' + }, + { + href: '/scenarios', + label: 'Open Scenarios', + description: 'Inspect planning scenarios and overrides.', + keywords: 'scenarios sandbox overrides compare planning' + } +]; + +export function matchesRoute(href: string, pathname: string) { + return href === '/' ? pathname === '/' : pathname.startsWith(href); +} + +export function pageTitle(pathname: string) { + return clientNavigationItems.find((item) => matchesRoute(item.href, pathname))?.label ?? 
'Dashboard'; +} + +export function clientBreadcrumbs(pathname: string): Crumb[] { + const root: Crumb = { label: 'Workspace', href: '/' }; + + if (pathname === '/') { + return [root, { label: 'Dashboard' }]; + } + + if (pathname.startsWith('/mix-calculator')) { + const trail: Crumb[] = [root, { label: 'Mix Calculator', href: '/mix-calculator' }]; + if (pathname === '/mix-calculator/new') trail.push({ label: 'New Session' }); + else if (pathname.endsWith('/print')) trail.push({ label: 'Print' }); + else if (pathname !== '/mix-calculator') trail.push({ label: 'Session' }); + return trail; + } + + if (pathname.startsWith('/mixes')) { + const trail: Crumb[] = [root, { label: 'Mix Master', href: '/mixes' }]; + if (pathname === '/mixes/new') trail.push({ label: 'New Mix' }); + else if (pathname !== '/mixes') trail.push({ label: 'Detail' }); + return trail; + } + + const sectionMap: Record = { + '/raw-materials': 'Raw Materials', + '/products': 'Products', + '/scenarios': 'Scenarios', + '/client-access': 'Client Access', + '/reporting': 'Reporting', + '/settings': 'Settings' + }; + const section = sectionMap[pathname]; + if (section) return [root, { label: section }]; + + return [root, { label: pageTitle(pathname) }]; +} diff --git a/frontend/src/routes/mix-calculator/[id]/+page.svelte b/frontend/src/routes/mix-calculator/[id]/+page.svelte index e7cf906..19b27fd 100644 --- a/frontend/src/routes/mix-calculator/[id]/+page.svelte +++ b/frontend/src/routes/mix-calculator/[id]/+page.svelte @@ -1,12 +1,12 @@ {#if data.session} - + {:else}

Mix Calculator

diff --git a/frontend/src/routes/mix-calculator/new/+page.svelte b/frontend/src/routes/mix-calculator/new/+page.svelte index 58353e8..96e42b8 100644 --- a/frontend/src/routes/mix-calculator/new/+page.svelte +++ b/frontend/src/routes/mix-calculator/new/+page.svelte @@ -1,6 +1,6 @@ - + diff --git a/frontend/src/routes/mixes/[id]/+page.svelte b/frontend/src/routes/mixes/[id]/+page.svelte index 750858f..f24f530 100644 --- a/frontend/src/routes/mixes/[id]/+page.svelte +++ b/frontend/src/routes/mixes/[id]/+page.svelte @@ -1,7 +1,7 @@ - + diff --git a/frontend/src/routes/mixes/new/+page.svelte b/frontend/src/routes/mixes/new/+page.svelte index e6178f6..624f7ba 100644 --- a/frontend/src/routes/mixes/new/+page.svelte +++ b/frontend/src/routes/mixes/new/+page.svelte @@ -1,7 +1,7 @@ - + diff --git a/frontend/src/routes/raw-materials/+page.svelte b/frontend/src/routes/raw-materials/+page.svelte index 00c6cad..76df7aa 100644 --- a/frontend/src/routes/raw-materials/+page.svelte +++ b/frontend/src/routes/raw-materials/+page.svelte @@ -1,6 +1,8 @@ -
- + + {#snippet rail()} + (activeId = id as ReportId)} + /> + {/snippet}
- {#if activeReport} - {@const PanelIcon = activeReport.icon} -
- -
-

{activeReport.group}

-

{activeReport.label}

-

{activeReport.description}

-
-
- {/if} -
{#if activeId === 'summary'}
@@ -244,227 +241,127 @@ {/each}
+ + {:else if activeId === 'sales-target-report'} +
+
+
+

Sales Target Report

+

Live embedded Power BI view for sales target tracking and review.

+
+ + Open in Power BI + +
+ +
+ +
+
+ + {:else if activeId === 'finished-product-kanban'} +
+
+
+

Finished Product - Kanban

+

Live embedded Power BI view for finished product review and kanban-style planning.

+
+ + Open in Power BI + +
+ +
+ +
+
{/if}
- +