#!/usr/bin/env python3
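"""Kundenportal backend: PostgreSQL access helpers, demo seed data, security
CSV import and Entra ID (OIDC) login with signed portal session tokens."""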
import argparse
import base64
import csv
import hashlib
import hmac
import json
import os
import re
import secrets
import time
import unicodedata
from datetime import datetime, timedelta, timezone
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
from pathlib import Path
from urllib import error as urlerror
from urllib import request as urlrequest
from urllib.parse import parse_qs, urlencode, urlparse

try:
    import psycopg
except ImportError:
    psycopg = None

try:
    import jwt
except ImportError:
    jwt = None


DB_INTEGRITY_ERRORS = ()
if psycopg is not None:
    DB_INTEGRITY_ERRORS = (psycopg.IntegrityError,)


class DbRow:
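    """Row wrapper allowing access by integer index or column name."""
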
    def __init__(self, columns, values):
        self._columns = tuple(columns)
        self._values = tuple(values)
        self._index = {name: idx for idx, name in enumerate(self._columns)}

    def __getitem__(self, key):
        if isinstance(key, int):
            return self._values[key]
        return self._values[self._index[str(key)]]

    def __iter__(self):
        return iter(self._values)

    def keys(self):
        return self._columns

    def items(self):
        return [(name, self._values[idx]) for idx, name in enumerate(self._columns)]

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default


class DbCursor:
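    """Cursor wrapper that returns DbRow objects from fetchone()/fetchall()."""
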
    def __init__(self, cursor, columns):
        self._cursor = cursor
        self._columns = columns
        self.lastrowid = getattr(cursor, "lastrowid", None)

    def _wrap_row(self, row):
        if row is None:
            return None
        if isinstance(row, DbRow):
            return row
        return DbRow(self._columns, row)

    def fetchone(self):
        return self._wrap_row(self._cursor.fetchone())

    def fetchall(self):
        return [self._wrap_row(row) for row in self._cursor.fetchall()]


class DbConnection:
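    """Thin DB-API wrapper that rewrites '?' placeholders for psycopg."""
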
    def __init__(self, conn, dialect: str):
        self._conn = conn
        self.dialect = dialect

    @staticmethod
    def _translate_query(dialect: str, query: str) -> str:
        if dialect != "postgres":
            return query
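        # Naive placeholder rewrite: every '?' becomes '%s', so SQL text must
        # not contain literal question marks.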
        return query.replace("?", "%s")

    def execute(self, query: str, params=()):
        cursor = self._conn.cursor()
        cursor.execute(self._translate_query(self.dialect, query), params)
        columns = [item[0] for item in (cursor.description or [])]
        return DbCursor(cursor, columns)

    def executemany(self, query: str, seq_of_params):
        cursor = self._conn.cursor()
        cursor.executemany(self._translate_query(self.dialect, query), seq_of_params)
        columns = [item[0] for item in (cursor.description or [])]
        return DbCursor(cursor, columns)

    def commit(self):
        self._conn.commit()

    def rollback(self):
        self._conn.rollback()

    def close(self):
        self._conn.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, _tb):
        if exc is not None:
            self.rollback()
        self.close()


def open_db_connection(db_dialect: str, db_target: str) -> DbConnection:
    if db_dialect != "postgres":
        raise RuntimeError("Unsupported database dialect. Expected 'postgres'.")
    if psycopg is None:
        raise RuntimeError(
            "PostgreSQL ist konfiguriert, aber 'psycopg' ist nicht installiert. "
            "Bitte 'pip install psycopg[binary]' ausfuehren."
        )
    return DbConnection(psycopg.connect(db_target), "postgres")


def table_columns(conn: DbConnection, table_name: str):
    rows = conn.execute(
        """
        SELECT column_name
        FROM information_schema.columns
        WHERE table_schema = 'public' AND table_name = ?
        """,
        (table_name,),
    ).fetchall()
    return {row["column_name"] for row in rows}


def insert_and_get_id(conn: DbConnection, sql: str, params=()):
    row = conn.execute(f"{sql} RETURNING id", params).fetchone()
    return int(row["id"])


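# Security measure catalogue seeded into security_measures:
# (measure_key, area, title, default_state, description).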
MEASURES = [
    ("tier-model", "onprem", "Tier-Modell / Enterprise Zugriffsmodell", "inprogress", "Administrative Rollen nach Enterprise-Zugriffsmodell segmentieren."),
    ("entra-ca-mfa-defaults", "cloud", "Entra Conditional Access (CA) / M365 MFA Security Defaults (SD)", "inprogress", "Tenant-weite Grundhaertung fuer MFA und Zugriffssteuerung."),
    ("entra-phishing-resistant-mfa", "cloud", "Entra Phishing Resistent MFA", "open", "Starke Faktoren wie FIDO2 oder passwortlose Verfahren ausbauen."),
    ("ca-compliant-devices", "cloud", "Conditional Access nur von compliant Devices", "inprogress", "Ressourcenzugriff auf compliant und registrierte Endgeraete begrenzen."),
    ("m365-advisor-score-target", "cloud", "M365 (Advisor/Sicherheitsbewertung in % Ziel 80)", "inprogress", "M365 Security Score systematisch Richtung 80 Prozent bringen."),
    ("cipp-standards", "cloud", "CIPP Standards", "done", "Standardisierte Baselines und Mandantenrichtlinien ueber CIPP anwenden."),
    ("domain-risk-level", "onprem", "Domain Risk Level (lokale AD)", "open", "Risikobewertung fuer lokale AD-Struktur, Gruppen und Legacy-Protokolle."),
    ("edr", "endpoint", "EDR", "inprogress", "Endpoint Detection and Response flaechendeckend auf Clients und Servern."),
    ("client-security-asr", "endpoint", "Clientsicherheit (ASR)", "inprogress", "Attack Surface Reduction Regeln ausrollen und Ausnahmen reduzieren."),
    ("laps-single-local-admin", "endpoint", "LAPS (einziger lokaler Admin am Client)", "done", "Lokale Adminkonten ueber LAPS steuern und rotieren."),
    ("windows-hello-mfa", "endpoint", "Windows Hello (MFA am Client)", "inprogress", "Benutzeranmeldung ueber starke lokale MFA mit Windows Hello."),
    ("mobile-device-management", "endpoint", "Mobile Device Management", "inprogress", "Mobile Endgeraete inventarisieren, absichern und compliant halten."),
    ("secure-external-access", "onprem", "Absicherung externe Zugriffe (kein Forwarding, DMZ, VPN, Entra GSA)", "open", "Externe Zugriffswege minimieren und auf Zero-Trust-Methoden umbauen."),
    ("network-segmentation", "onprem", "Netzwerksegmentierung", "open", "Kritische Zonen trennen und seitliche Bewegungen erschweren."),
    ("firewall-baseline", "onprem", "Firewall-Baseline", "inprogress", "Einheitliche Baseline-Regeln nach Standort und Rollentypen etablieren."),
    ("firewall-ruleset", "onprem", "Firewall-Regelwerk", "open", "Bestehende Regeln bereinigen, dokumentieren und rezertifizieren."),
    ("backup-321-cloud", "cloud", "3-2-1 Backupkonzept (Cloud)", "inprogress", "Mehrstufiges Backupdesign mit isolierter Kopie umsetzen."),
    ("m365-backup", "cloud", "M365 Backup", "open", "M365-Workloads in ein dediziertes Backup-/Restore-Konzept ueberfuehren."),
    ("disaster-recovery-check", "onprem", "Disaster Recover Check", "open", "Regelmaessige Wiederanlauf- und Restore-Pruefungen dokumentieren."),
    ("physical-security", "onprem", "Physische Sicherheit", "inprogress", "Zutrittsschutz, Hardware-Schutz und Standortkontrollen staerken."),
    ("saas-security", "cloud", "SaaS Security", "open", "SaaS-Anwendungen inventarisieren und Sicherheitsvorgaben harmonisieren."),
    ("mail-auth-spf-dkim-dmarc", "cloud", "SPF / DKIM / DMARC", "open", "E-Mail-Domain-Schutz ueber SPF, DKIM und DMARC abgestimmt implementieren und ueber Reports nachhalten."),
    ("password-manager", "organizational", "Passwortmanager", "inprogress", "Zentralen Passwortmanager fuer Benutzer und Adminkonten etablieren."),
    ("awareness-training", "organizational", "Awareness Schulungen", "inprogress", "Rollenbasierte Security-Trainings und Phishing-Simulationen ausrollen."),
    ("internal-risk-level", "onprem", "Interne Risikostufe", "open", "Risiko je Fachbereich bewerten und in Priorisierungsmodell uebernehmen."),
    ("zero-trust-implemented", "cloud", "ZeroTrust umgesetzt", "open", "Reifegrad fuer Zero-Trust ueber Identitaet, Geraete und Daten messen."),
    ("supplier-risk-management", "organizational", "Lieferanten-/Dienstleister-Risiko", "open", "Cloud-Dienste externer Softwarehersteller bzgl. Sicherheitsniveau, Datenzugriffen und Exit-Faehigkeit bewerten."),
    ("vulnerability-management-process", "organizational", "Vulnerability-Management Prozess", "inprogress", "Scan-, Priorisierungs- und Patch-SLA-Prozess mit dokumentierten Ausnahmen verankern."),
    ("logging-detection-operations", "organizational", "Logging & Detection Betrieb", "inprogress", "Zentrale Logsammlung, Alarmregeln und Reaktionszeiten fuer Kernsysteme festlegen."),
    ("incident-response-playbooks", "organizational", "Incident-Response Playbooks", "open", "Ransomware/BEC/Account-Takeover-Playbooks inkl. Uebungen regelmaessig testen."),
    ("identity-lifecycle-jml", "organizational", "Identity Lifecycle (Joiner/Mover/Leaver)", "inprogress", "Durchgaengiger JML-Prozess inkl. Rechte-Rezertifizierung und Deprovisioning."),
    ("nis2-regulatory-check", "organizational", "Regulatorik-Check (NIS2/KRITIS)", "open", "Betroffenheit, Meldepflichten und Nachweisprozess fuer NIS2 und KRITIS strukturiert dokumentieren."),
]

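# Demo tenants seeded on first initialisation: (tenant_slug, display name).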
TENANTS = [
    ("nigefa-demo", "NIGEFA Demo GmbH"),
    ("acme-tools", "ACME Tools AG"),
    ("holzbau-nord", "Holzbau Nord KG"),
    ("gemeinde-furth-rathaus", "Gemeinde Fürth Rathaus"),
    ("gfn-selco", "GFN-Selco"),
    ("dietz-ag", "Dietz AG"),
    ("spir-star-ag", "SPIR STAR AG"),
    ("spirstar-frankreich", "Spirstar Frankreich"),
    ("kilian-kanalsanierung", "Kilian Kanalsanierung"),
    ("max-jung-transport-gmbh", "Max Jung Transport GmbH"),
    ("incity-ag", "InCity AG"),
    ("kopp-schleiftechnik", "Kopp-Schleiftechnik"),
    ("gemeindeverwaltung-rimbach", "Gemeindeverwaltung Rimbach"),
    ("schweiger-und-scholz", "Schweiger und Scholz"),
    ("div-mannheim-terra-cloud", "DIV Mannheim /Terra Cloud"),
    ("hvb-hausverwaltung-brosius", "HVB Hausverwaltung Brosius"),
    ("iq-bau-gmbh", "IQ BAU GmbH"),
    ("papurex", "Papurex"),
    ("behindertenhilfe-bergstrasse-bhb", "Behindertenhilfe Bergstraße (BHB)"),
    ("notariat-kilian", "Notariat Kilian"),
    ("monoflo", "Monoflo"),
    ("gemeinde-gorxheimertal", "Gemeinde Gorxheimertal"),
    ("cortec-wald-michelbach", "Cortec, Wald-Michelbach"),
    ("freudenbergstiftung", "Freudenbergstiftung"),
    ("unold-ag", "Unold AG"),
    ("big-haus", "BIG-Haus"),
    ("dr-dorsam", "Dr. Dörsam"),
    ("dr-med-trautmann-und-horr", "Dr. med Trautmann und Hörr"),
    ("eidenmuller-jochem-gawehn", "Eidenmüller /Jochem-Gawehn"),
    ("entwicklungsgesellschaft-lorsch", "Entwicklungsgesellschaft Lorsch"),
    ("e-concept", "E-Concept"),
    ("gemeinde-furth-bauhof", "Gemeinde Fürth Bauhof"),
    ("gemeinschaftspraxis-erbach", "Gemeinschaftspraxis Erbach"),
    ("gkh-versicherungen", "GKH-Versicherungen"),
    ("gloede-partner", "Gloede + Partner"),
    ("hausverwaltung-gotz", "Hausverwaltung Götz"),
    ("haus-birkenhof-seeheim-jugenheim", "Haus Birkenhof, Seeheim-Jugenheim"),
    ("helmling-baumaschinen", "Helmling Baumaschinen"),
    ("heinz-knatz", "Heinz & Knatz"),
    ("hotel-hamburger-hof-frankfurt-schnabel", "Hotel Hamburger Hof, Frankfurt (Schnabel)"),
    ("hausverwaltung-schnabel", "Hausverwaltung Schnabel"),
    ("isp", "ISP"),
    ("kilian-gmbh", "Kilian GmbH"),
    ("marneth", "Marneth"),
    ("ortlieb-organic", "Ortlieb Organic"),
    ("pfeifer-baumaschinen-ohne-server", "Pfeifer Baumaschinen (ohne Server)"),
    ("sanitar-schmitt-morlenbach", "Sanitär Schmitt, Mörlenbach"),
    ("skarke", "Skarke"),
    ("schmusser-gmbh", "Schmüsser GmbH"),
    ("stadt-lorsch", "Stadt Lorsch"),
    ("stadt-lorsch-feuerwehr", "Stadt Lorsch Feuerwehr"),
    ("swoboda-praxis", "Swoboda Praxis"),
    ("raber", "Raber"),
    ("rc-reisemobil-mannheim", "RC Reisemobil Mannheim"),
    ("talon", "Talon"),
    ("andreas-koster-tierartzt", "Andreas Köster Tierartzt"),
    ("gies-meier-advocates", "Gies-Meier Advocates"),
    ("waldhauser", "Waldhauser"),
    ("vipharm", "Vipharm"),
    ("zelo", "Zelo"),
    ("banater-schwaben", "Banater-Schwaben"),
    ("crew-knuttel", "Crew Knüttel"),
]

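# Demo portal content keyed by tenant slug; tenants without their own entry are
# seeded from the baseline tenant (BASELINE_TENANT_SLUG).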
PORTAL_OVERVIEWS = {
    "nigefa-demo": {
        "kpis": {"active_services": 12, "devices": 186, "open_tickets": 7},
        "score": {
            "current": 394,
            "max": 530,
            "benchmark": 57,
            "target": 80,
            "categories": [
                {"name": "Identity", "points": 122, "max": 180, "state": "warn", "label": "Ausbau"},
                {"name": "Device", "points": 104, "max": 150, "state": "warn", "label": "Ausbau"},
                {"name": "Data", "points": 96, "max": 110, "state": "good", "label": "Stabil"},
                {"name": "Apps", "points": 72, "max": 90, "state": "good", "label": "Stabil"},
            ],
        },
        "mdm": {"compliance": "92%", "unmanaged": "14", "policy_drift": "3 Richtlinien", "last_sync": "Heute, 14:31"},
    },
    "acme-tools": {
        "kpis": {"active_services": 10, "devices": 132, "open_tickets": 4},
        "score": {
            "current": 426,
            "max": 530,
            "benchmark": 57,
            "target": 80,
            "categories": [
                {"name": "Identity", "points": 145, "max": 180, "state": "good", "label": "Stabil"},
                {"name": "Device", "points": 112, "max": 150, "state": "warn", "label": "Ausbau"},
                {"name": "Data", "points": 97, "max": 110, "state": "good", "label": "Stabil"},
                {"name": "Apps", "points": 72, "max": 90, "state": "good", "label": "Stabil"},
            ],
        },
        "mdm": {"compliance": "95%", "unmanaged": "6", "policy_drift": "1 Richtlinie", "last_sync": "Heute, 13:42"},
    },
    "holzbau-nord": {
        "kpis": {"active_services": 8, "devices": 74, "open_tickets": 9},
        "score": {
            "current": 301,
            "max": 530,
            "benchmark": 57,
            "target": 80,
            "categories": [
                {"name": "Identity", "points": 96, "max": 180, "state": "warn", "label": "Ausbau"},
                {"name": "Device", "points": 83, "max": 150, "state": "warn", "label": "Ausbau"},
                {"name": "Data", "points": 71, "max": 110, "state": "warn", "label": "Ausbau"},
                {"name": "Apps", "points": 51, "max": 90, "state": "warn", "label": "Ausbau"},
            ],
        },
        "mdm": {"compliance": "81%", "unmanaged": "19", "policy_drift": "5 Richtlinien", "last_sync": "Heute, 11:08"},
    },
}

PORTAL_CONTRACTS = {
    "nigefa-demo": [
        {
            "contract_id": "server-monitoring",
            "service": "Systemueberwachung Server",
            "package": "Infra Core",
            "sla": "24x7",
            "contract_no": "V-2026-001",
            "status_class": "good",
            "status_label": "Aktiv",
            "runtime": "Laufzeit bis 31.12.2026",
            "description": "Monitoring, Alarmierung und Basisreaktion fuer produktive Server-Systeme.",
            "scope": [
                "CPU/RAM/Disk Monitoring mit Schwellwert-Alarmen",
                "Dienstueberwachung (AD, DNS, File, SQL) inkl. Ticket-Push",
                "Patch-Status und Reboot-Fensterkontrolle",
                "Monatlicher Servicebericht",
            ],
            "assets": ["SRV-DC01", "SRV-FS01", "SRV-APP01", "SRV-SQL01"],
        },
        {
            "contract_id": "pc-monitoring",
            "service": "Systemueberwachung PC-Systeme",
            "package": "Endpoint Plus",
            "sla": "8x5",
            "contract_no": "V-2026-002",
            "status_class": "good",
            "status_label": "Aktiv",
            "runtime": "Laufzeit bis 31.12.2026",
            "description": "Zustandsueberwachung und Endpoint-Betriebsservice fuer Arbeitsplatzgeraete.",
            "scope": ["Agent-Health", "Patch-Compliance", "AV/EDR Status", "Lifecycle Reporting"],
            "assets": ["NB-2301", "PC-1178", "MB-9012", "VDI-1101 bis VDI-1112"],
        },
        {
            "contract_id": "backup-monitoring",
            "service": "Backup Monitoring",
            "package": "Cloud Backup",
            "sla": "24x7",
            "contract_no": "V-2026-003",
            "status_class": "warn",
            "status_label": "Review",
            "runtime": "Laufzeit bis 31.12.2026",
            "description": "Sicherstellung erfolgreicher Sicherungen und Restore-Bereitschaft.",
            "scope": ["Backup-Job Monitoring", "Immutable Storage Kontrolle", "Restore-Test", "RPO/RTO Eskalation"],
            "assets": ["M365 Mailboxen: 82", "SharePoint Sites: 14", "OneDrive Konten: 96", "Server-Workloads: 3"],
        },
    ],
    "acme-tools": [
        {
            "contract_id": "server-monitoring",
            "service": "Systemueberwachung Server",
            "package": "Infra Core",
            "sla": "24x7",
            "contract_no": "V-2026-101",
            "status_class": "good",
            "status_label": "Aktiv",
            "runtime": "Laufzeit bis 30.11.2026",
            "description": "Serverbetrieb inkl. Schwellwertueberwachung und Eskalation.",
            "scope": ["Monitoring", "Service Checks", "Patch Oversight"],
            "assets": ["SRV-ERP01", "SRV-DC02", "SRV-FS03"],
        },
        {
            "contract_id": "pc-monitoring",
            "service": "Systemueberwachung PC-Systeme",
            "package": "Endpoint Basic",
            "sla": "8x5",
            "contract_no": "V-2026-102",
            "status_class": "good",
            "status_label": "Aktiv",
            "runtime": "Laufzeit bis 30.11.2026",
            "description": "Endpoint-Monitoring fuer Office-Arbeitsplaetze.",
            "scope": ["Agent-Health", "EDR-Status", "Patchstand"],
            "assets": ["NB-4101 bis NB-4145", "PC-2101 bis PC-2144"],
        },
    ],
    "holzbau-nord": [
        {
            "contract_id": "server-monitoring",
            "service": "Systemueberwachung Server",
            "package": "Infra Start",
            "sla": "8x5",
            "contract_no": "V-2026-201",
            "status_class": "warn",
            "status_label": "Review",
            "runtime": "Laufzeit bis 31.10.2026",
            "description": "Basismonitoring fuer Kernserver mit Fokus Verfuegbarkeit.",
            "scope": ["Ping/Service Monitoring", "Ticket-Eskalation", "Monatsreport"],
            "assets": ["SRV-FILE01", "SRV-DC01"],
        },
        {
            "contract_id": "backup-monitoring",
            "service": "Backup Monitoring",
            "package": "Cloud Backup",
            "sla": "24x7",
            "contract_no": "V-2026-202",
            "status_class": "warn",
            "status_label": "Review",
            "runtime": "Laufzeit bis 31.10.2026",
            "description": "Backupkontrolle inkl. Restore-Probe fuer kritische Daten.",
            "scope": ["Jobmonitoring", "Restore Probe", "Eskalation"],
            "assets": ["M365 Mailboxen: 37", "File Shares: 6"],
        },
    ],
}

PORTAL_INVENTORY = {
    "nigefa-demo": [
        {"hostname": "NB-2301", "type": "Notebook", "source": "ServerEye + Entra", "user": "M. Becker", "patch": "Aktuell", "status": "ok", "label": "Compliant"},
        {"hostname": "PC-1178", "type": "Desktop", "source": "Intune", "user": "L. Vogt", "patch": "3 Tage alt", "status": "warn", "label": "Warnung"},
        {"hostname": "SRV-FS01", "type": "Server", "source": "RMM Agent", "user": "-", "patch": "18 Tage alt", "status": "bad", "label": "Kritisch"},
        {"hostname": "MB-9012", "type": "Mobil", "source": "Intune", "user": "S. Koch", "patch": "Aktuell", "status": "ok", "label": "Compliant"},
    ],
    "acme-tools": [
        {"hostname": "NB-4120", "type": "Notebook", "source": "Intune", "user": "A. Fischer", "patch": "Aktuell", "status": "ok", "label": "Compliant"},
        {"hostname": "PC-2133", "type": "Desktop", "source": "ServerEye", "user": "N. Hartmann", "patch": "6 Tage alt", "status": "warn", "label": "Warnung"},
        {"hostname": "SRV-ERP01", "type": "Server", "source": "RMM Agent", "user": "-", "patch": "Aktuell", "status": "ok", "label": "Compliant"},
    ],
    "holzbau-nord": [
        {"hostname": "NB-701", "type": "Notebook", "source": "Intune", "user": "P. Winter", "patch": "12 Tage alt", "status": "warn", "label": "Warnung"},
        {"hostname": "PC-512", "type": "Desktop", "source": "ServerEye", "user": "L. Maas", "patch": "20 Tage alt", "status": "bad", "label": "Kritisch"},
        {"hostname": "SRV-FILE01", "type": "Server", "source": "RMM Agent", "user": "-", "patch": "9 Tage alt", "status": "warn", "label": "Warnung"},
    ],
}

PORTAL_ADVISOR = {
    "nigefa-demo": {
        "identity": [
            {"title": "Legacy Authentication blockieren", "state": "open", "impact": "+3%"},
            {"title": "Phishing-resistente MFA fuer Admins", "state": "inprogress", "impact": "+4%"},
        ],
        "device": [
            {"title": "Compliant Device Zugriff erzwingen", "state": "inprogress", "impact": "+2%"},
            {"title": "ASR-Regeln im Block-Mode", "state": "open", "impact": "+2%"},
        ],
        "data": [
            {"title": "M365 DLP Baseline aktivieren", "state": "open", "impact": "+3%"},
            {"title": "Externes Auto-Forwarding blockieren", "state": "inprogress", "impact": "+2%"},
        ],
        "apps": [
            {"title": "App Consent Richtlinien haerten", "state": "inprogress", "impact": "+2%"},
            {"title": "Defender for Cloud Apps Policies", "state": "open", "impact": "+3%"},
        ],
    },
    "acme-tools": {
        "identity": [{"title": "FIDO2 Rollout fuer Admins", "state": "inprogress", "impact": "+3%"}],
        "device": [{"title": "Defender Tamper Protection Tenantweit", "state": "done", "impact": "+1%"}],
        "data": [{"title": "Retention Labels fuer Finance", "state": "inprogress", "impact": "+2%"}],
        "apps": [{"title": "OAuth App Reviews automatisieren", "state": "open", "impact": "+2%"}],
    },
    "holzbau-nord": {
        "identity": [{"title": "Security Defaults aktivieren", "state": "open", "impact": "+2%"}],
        "device": [{"title": "BitLocker Compliance auf 95% anheben", "state": "open", "impact": "+3%"}],
        "data": [{"title": "M365 Backup fuer SharePoint ausrollen", "state": "inprogress", "impact": "+2%"}],
        "apps": [{"title": "SaaS App Shadow IT Review", "state": "open", "impact": "+2%"}],
    },
}

TENANT_USERS = {
    "nigefa-demo": [
        ("Max Admin", "max.admin@nigefa-demo.local", "Admin"),
        ("Sven Member", "sven.member@nigefa-demo.local", "Member"),
    ],
    "acme-tools": [
        ("Anna Admin", "anna.admin@acme-tools.local", "Admin"),
        ("Lena Member", "lena.member@acme-tools.local", "Member"),
    ],
    "holzbau-nord": [
        ("Peter Admin", "peter.admin@holzbau-nord.local", "Admin"),
    ],
}

VALID_STATUS = {"open", "inprogress", "done", "loop", "na"}
VALID_APP_USER_ROLES = {"Admin", "Member"}
VALID_TENANT_USER_ROLES = {"Admin", "Member"}
VALID_APP_AUTH_PROVIDERS = {"oidc", "password"}
BASELINE_TENANT_SLUG = "nigefa-demo"

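# Expected column order of the security CSV. Data cells are matched by position
# (the file's own header row is skipped), so these labels serve as record keys
# and keep the original spreadsheet wording, typos included.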
CSV_SECURITY_HEADERS = [
    "Kunde",
    "Tier-Modell / Enterprise Zugriffsmodell",
    "Entra Conditional Access (CA) / M365 MFA Security Defaults (SD)",
    "Entra Phising Resistent MFA",
    "Conditional Access - Zugriff auf Unternehmensressourcen nur von compliant Devices",
    "M365 (Advisor/Sicherheitsbewertung in % Ziel 80)",
    "CIPP Standards",
    "Domain Risk Level (lokale AD)",
    "EDR",
    "Clientsicherheit (ASR)",
    "LAPS (als einziger lokaler Admin am Client)",
    "Windows Hello (MFA am Client)",
    "Mobile Device Managment",
    "Absicherung externe Zugriffe (kein Forwarding, DMZ, VPN, Entra GSA)",
    "Netzwerksegmentierung",
    "Firewall-Baseline",
    "Firewall-Regelwerk",
    "3-2-1 Backupkonzept (Cloud)",
    "M365 Backup",
    "Disaster Recover Check",
    "Physische Sicherheit",
    "SaaS Security",
    "Passwortmanager",
    "Awareness Schulungen",
    "Interne Risikostufe",
    "ZeroTrust umgesetzt",
]

CSV_COLUMN_TO_MEASURE_KEY = {
    "Tier-Modell / Enterprise Zugriffsmodell": "tier-model",
    "Entra Conditional Access (CA) / M365 MFA Security Defaults (SD)": "entra-ca-mfa-defaults",
    "Entra Phising Resistent MFA": "entra-phishing-resistant-mfa",
    "Conditional Access - Zugriff auf Unternehmensressourcen nur von compliant Devices": "ca-compliant-devices",
    "M365 (Advisor/Sicherheitsbewertung in % Ziel 80)": "m365-advisor-score-target",
    "CIPP Standards": "cipp-standards",
    "Domain Risk Level (lokale AD)": "domain-risk-level",
    "EDR": "edr",
    "Clientsicherheit (ASR)": "client-security-asr",
    "LAPS (als einziger lokaler Admin am Client)": "laps-single-local-admin",
    "Windows Hello (MFA am Client)": "windows-hello-mfa",
    "Mobile Device Managment": "mobile-device-management",
    "Absicherung externe Zugriffe (kein Forwarding, DMZ, VPN, Entra GSA)": "secure-external-access",
    "Netzwerksegmentierung": "network-segmentation",
    "Firewall-Baseline": "firewall-baseline",
    "Firewall-Regelwerk": "firewall-ruleset",
    "3-2-1 Backupkonzept (Cloud)": "backup-321-cloud",
    "M365 Backup": "m365-backup",
    "Disaster Recover Check": "disaster-recovery-check",
    "Physische Sicherheit": "physical-security",
    "SaaS Security": "saas-security",
    "Passwortmanager": "password-manager",
    "Awareness Schulungen": "awareness-training",
    "Interne Risikostufe": "internal-risk-level",
    "ZeroTrust umgesetzt": "zero-trust-implemented",
}

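# Normalized CSV customer names that should map onto an existing seed tenant.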
CSV_TENANT_NAME_ALIASES = {
    "nigefa computersysteme gmbh": "nigefa demo gmbh",
}

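# Lowercased CSV cell values mapped to portal status values during import.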
CSV_STATUS_DONE = {"j", "ja", "y", "yes", "sd", "vorhanden", "true", "1"}
CSV_STATUS_INPROGRESS = {"k", "teilweise", "partial", "2"}
CSV_STATUS_OPEN = {"n", "nein", "no", "false", "0", "v"}
CSV_STATUS_LOOP = {"loop", "in the loop", "review", "zyklisch", "regelreview"}
CSV_STATUS_NA = {"na", "n/a", "nicht relevant"}
MANAGED_PRESENTATION_TOKEN_TTL_SECONDS = 900
MANAGED_PRESENTATION_MAX_TTL_DAYS = 30
APP_ENV = str(os.environ.get("APP_ENV", "development")).strip().lower()
PBKDF2_ITERATIONS = int(os.environ.get("KP_PASSWORD_HASH_ITERATIONS", "210000"))
MAX_REQUEST_BODY_BYTES = int(os.environ.get("KP_MAX_REQUEST_BODY_BYTES", "1048576"))
_MANAGED_PRESENTATION_SECRET_ENV = str(os.environ.get("KP_MANAGED_PRESENTATION_SECRET", "")).strip()
_CORS_ALLOW_ORIGINS_ENV = str(os.environ.get("KP_CORS_ALLOW_ORIGINS", "")).strip()
_AUTH_SESSION_SECRET_ENV = str(os.environ.get("KP_AUTH_SESSION_SECRET", "")).strip()
OIDC_ENABLED = str(os.environ.get("KP_OIDC_ENABLED", "1")).strip().lower() not in {"0", "false", "no"}
OIDC_TENANT_ID = str(os.environ.get("KP_OIDC_TENANT_ID", "")).strip()
OIDC_CLIENT_ID = str(os.environ.get("KP_OIDC_CLIENT_ID", "")).strip()
OIDC_CLIENT_SECRET = str(os.environ.get("KP_OIDC_CLIENT_SECRET", "")).strip()
OIDC_REDIRECT_URI = str(os.environ.get("KP_OIDC_REDIRECT_URI", "")).strip()
OIDC_SCOPE = str(os.environ.get("KP_OIDC_SCOPE", "openid profile email")).strip()
OIDC_ALLOWED_DOMAINS = {
    item.strip().lower()
    for item in str(os.environ.get("KP_OIDC_ALLOWED_DOMAINS", "")).split(",")
    if item.strip()
}
OIDC_ALLOWED_USERS = {
    item.strip().lower()
    for item in str(os.environ.get("KP_OIDC_ALLOWED_USERS", "")).split(",")
    if item.strip()
}
OIDC_ADMIN_GROUP_IDS = {
    item.strip()
    for item in str(os.environ.get("KP_OIDC_ADMIN_GROUP_IDS", "")).split(",")
    if item.strip()
}
AUTH_SESSION_TTL_SECONDS = int(os.environ.get("KP_AUTH_SESSION_TTL_SECONDS", "28800"))
OIDC_HTTP_TIMEOUT_SECONDS = int(os.environ.get("KP_OIDC_HTTP_TIMEOUT_SECONDS", "10"))
_RBAC_DEFAULT = "1" if APP_ENV in {"production", "prod"} else "0"
RBAC_ENFORCED = str(os.environ.get("KP_RBAC_ENFORCED", _RBAC_DEFAULT)).strip().lower() not in {"0", "false", "no"}
_OIDC_BOOTSTRAP_ADMIN_DEFAULT = "0" if APP_ENV in {"production", "prod"} else "1"
OIDC_BOOTSTRAP_ADMIN = str(os.environ.get("KP_OIDC_BOOTSTRAP_ADMIN", _OIDC_BOOTSTRAP_ADMIN_DEFAULT)).strip().lower() not in {"0", "false", "no"}
DEFAULT_DEV_CORS_ALLOW_ORIGINS = {
    "http://127.0.0.1:8000",
    "http://localhost:8000",
    "http://127.0.0.1:8001",
    "http://localhost:8001",
    "http://127.0.0.1:8010",
    "http://localhost:8010",
}


def _load_managed_presentation_secret() -> str:
    if _MANAGED_PRESENTATION_SECRET_ENV:
        if len(_MANAGED_PRESENTATION_SECRET_ENV) < 32 and APP_ENV in {"production", "prod"}:
            raise RuntimeError("KP_MANAGED_PRESENTATION_SECRET muss in Produktion mindestens 32 Zeichen lang sein.")
        return _MANAGED_PRESENTATION_SECRET_ENV

    if APP_ENV in {"production", "prod"}:
        raise RuntimeError("KP_MANAGED_PRESENTATION_SECRET ist in Produktion erforderlich.")

    generated = secrets.token_urlsafe(48)
    print("[kundenportal-api] warning: using ephemeral managed presentation secret (set KP_MANAGED_PRESENTATION_SECRET).")
    return generated


MANAGED_PRESENTATION_SECRET = _load_managed_presentation_secret()


def _load_auth_session_secret() -> str:
    if _AUTH_SESSION_SECRET_ENV:
        if len(_AUTH_SESSION_SECRET_ENV) < 32 and APP_ENV in {"production", "prod"}:
            raise RuntimeError("KP_AUTH_SESSION_SECRET muss in Produktion mindestens 32 Zeichen lang sein.")
        return _AUTH_SESSION_SECRET_ENV
    if APP_ENV in {"production", "prod"}:
        raise RuntimeError("KP_AUTH_SESSION_SECRET ist in Produktion erforderlich.")
    generated = secrets.token_urlsafe(48)
    print("[kundenportal-api] warning: using ephemeral auth session secret (set KP_AUTH_SESSION_SECRET).")
    return generated


AUTH_SESSION_SECRET = _load_auth_session_secret()


def now_iso() -> str:
    return datetime.now(tz=timezone.utc).isoformat()


def now_unix() -> int:
    return int(time.time())


def _b64url_encode(raw: bytes) -> str:
    return base64.urlsafe_b64encode(raw).decode("ascii").rstrip("=")


def _b64url_decode(value: str) -> bytes:
    padding = "=" * ((4 - len(value) % 4) % 4)
    return base64.urlsafe_b64decode(value + padding)


def make_password_salt() -> str:
    return secrets.token_hex(16)


def hash_password_with_salt(salt: str, password: str) -> str:
    derived = hashlib.pbkdf2_hmac(
        "sha256",
        str(password).encode("utf-8"),
        str(salt).encode("utf-8"),
        PBKDF2_ITERATIONS,
    )
    return derived.hex()


def verify_password_hash(salt: str, password: str, expected_hash: str) -> bool:
    expected = str(expected_hash or "").strip().lower()
    if not expected:
        return False

    current = hash_password_with_salt(salt, password)
    if hmac.compare_digest(current, expected):
        return True

    # Backward compatibility: links created before PBKDF2 used a plain salted SHA-256 hash.
    legacy = hashlib.sha256(f"{salt}:{password}".encode("utf-8")).hexdigest()
    return hmac.compare_digest(legacy, expected)


def make_share_id() -> str:
    return secrets.token_urlsafe(18)


def sign_managed_presentation_token(payload: dict) -> str:
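    """Return '<b64url(payload)>.<b64url(sig)>' signed with HMAC-SHA256."""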
    payload_json = json.dumps(payload, separators=(",", ":"), sort_keys=True).encode("utf-8")
    payload_part = _b64url_encode(payload_json)
    signature = hmac.new(
        MANAGED_PRESENTATION_SECRET.encode("utf-8"),
        payload_part.encode("ascii"),
        hashlib.sha256,
    ).digest()
    return f"{payload_part}.{_b64url_encode(signature)}"


def verify_managed_presentation_token(token: str):
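    """Return the payload dict if signature and expiry are valid, else None."""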
    if not isinstance(token, str) or "." not in token:
        return None
    payload_part, signature_part = token.split(".", 1)
    expected_signature = hmac.new(
        MANAGED_PRESENTATION_SECRET.encode("utf-8"),
        payload_part.encode("ascii"),
        hashlib.sha256,
    ).digest()
    try:
        actual_signature = _b64url_decode(signature_part)
    except Exception:
        return None
    if not hmac.compare_digest(expected_signature, actual_signature):
        return None
    try:
        payload = json.loads(_b64url_decode(payload_part).decode("utf-8"))
    except Exception:
        return None
    if not isinstance(payload, dict):
        return None
    if int(payload.get("exp", 0) or 0) < now_unix():
        return None
    return payload


def make_auth_session_token(payload: dict) -> str:
    payload_json = json.dumps(payload, separators=(",", ":"), sort_keys=True).encode("utf-8")
    payload_part = _b64url_encode(payload_json)
    signature = hmac.new(
        AUTH_SESSION_SECRET.encode("utf-8"),
        payload_part.encode("ascii"),
        hashlib.sha256,
    ).digest()
    return f"{payload_part}.{_b64url_encode(signature)}"


def verify_auth_session_token(token: str):
    if not isinstance(token, str) or "." not in token:
        return None
    payload_part, signature_part = token.split(".", 1)
    expected_signature = hmac.new(
        AUTH_SESSION_SECRET.encode("utf-8"),
        payload_part.encode("ascii"),
        hashlib.sha256,
    ).digest()
    try:
        actual_signature = _b64url_decode(signature_part)
    except Exception:
        return None
    if not hmac.compare_digest(expected_signature, actual_signature):
        return None
    try:
        payload = json.loads(_b64url_decode(payload_part).decode("utf-8"))
    except Exception:
        return None
    if not isinstance(payload, dict):
        return None
    if str(payload.get("typ", "")) != "portal-session":
        return None
    if int(payload.get("exp", 0) or 0) < now_unix():
        return None
    return payload


def _oidc_is_configured() -> bool:
    return OIDC_ENABLED and bool(OIDC_TENANT_ID and OIDC_CLIENT_ID and OIDC_REDIRECT_URI)


def _oidc_discovery_url() -> str:
    return f"https://login.microsoftonline.com/{OIDC_TENANT_ID}/v2.0/.well-known/openid-configuration"


def _http_json_request(url: str, method: str = "GET", data=None):
    payload = None
    headers = {"Accept": "application/json"}
    if data is not None:
        payload = urlencode(data).encode("utf-8")
        headers["Content-Type"] = "application/x-www-form-urlencoded"
    req = urlrequest.Request(url=url, method=method, data=payload, headers=headers)
    # URLs come from the fixed Entra discovery/token endpoints; no user-supplied schemes reach this call.
    with urlrequest.urlopen(req, timeout=OIDC_HTTP_TIMEOUT_SECONDS) as response:  # nosec B310
        body = response.read().decode("utf-8")
    return json.loads(body)


def _extract_email_from_claims(claims: dict) -> str:
    for key in ("preferred_username", "upn", "email"):
        value = str(claims.get(key, "")).strip().lower()
        if value:
            return value
    return ""


def _role_priority(role: str) -> int:
    order = {"admin": 0, "member": 1, "user": 2}
    return order.get(str(role or "").strip().lower(), 99)


def _oidc_roles_from_app_users(db_dialect: str, db_target: str, user_email: str) -> list[str]:
    email = str(user_email or "").strip().lower()
    if not email or not db_dialect or not db_target:
        return []
    try:
        with open_db_connection(db_dialect, db_target) as conn:
            rows = conn.execute(
                """
                SELECT role
                FROM app_users
                WHERE LOWER(email) = ?
                  AND is_active = 1
                """,
                (email,),
            ).fetchall()
    except Exception:
        return []

    mapped = []
    for row in rows:
        role_value = str(row["role"] or "").strip().lower()
        if role_value in {"admin"}:
            mapped.append("admin")
        elif role_value in {"member"}:
            mapped.append("member")
    if not mapped:
        return []
    mapped.sort(key=_role_priority)
    return [mapped[0]]


def _bootstrap_upsert_app_admin(db_dialect: str, db_target: str, user_email: str, display_name: str) -> None:
    email = str(user_email or "").strip().lower()
    if not email or not db_dialect or not db_target:
        return
    name = str(display_name or "").strip() or email
    try:
        with open_db_connection(db_dialect, db_target) as conn:
            existing = conn.execute(
                "SELECT id FROM app_users WHERE LOWER(email) = ?",
                (email,),
            ).fetchone()
            if existing:
                conn.execute(
                    """
                    UPDATE app_users
                    SET display_name = ?, role = 'Admin', auth_provider = 'oidc', is_active = 1
                    WHERE id = ?
                    """,
                    (name, int(existing["id"])),
                )
            else:
                conn.execute(
                    """
                    INSERT INTO app_users (display_name, email, role, auth_provider, is_active)
                    VALUES (?, ?, 'Admin', 'oidc', 1)
                    """,
                    (name, email),
                )
            conn.commit()
    except Exception:
        return


def exchange_entra_oidc_code(code: str, code_verifier: str, redirect_uri: str, db_dialect: str = "", db_target: str = ""):
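    """Exchange an Entra ID authorization code (PKCE) for a signed portal session.

    Validates the id_token against the tenant JWKS, applies user/domain
    allow-lists, resolves roles (bootstrap admin, admin groups or app_users)
    and returns the portal session token plus basic user info.
    """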
    if jwt is None:
        raise RuntimeError("OIDC-Bibliothek fehlt. Bitte 'pip install -r requirements.txt' ausfuehren.")
    if not _oidc_is_configured():
        raise RuntimeError("OIDC ist nicht vollstaendig konfiguriert.")
    if not code or not code_verifier:
        raise ValueError("code und code_verifier sind erforderlich")
    if redirect_uri != OIDC_REDIRECT_URI:
        raise ValueError("redirect_uri ungueltig")

    discovery = _http_json_request(_oidc_discovery_url())
    token_endpoint = str(discovery.get("token_endpoint", "")).strip()
    issuer = str(discovery.get("issuer", "")).strip()
    jwks_uri = str(discovery.get("jwks_uri", "")).strip()
    if not token_endpoint or not issuer or not jwks_uri:
        raise RuntimeError("OIDC discovery unvollstaendig")

    form = {
        "grant_type": "authorization_code",
        "client_id": OIDC_CLIENT_ID,
        "code": code,
        "redirect_uri": redirect_uri,
        "code_verifier": code_verifier,
        "scope": OIDC_SCOPE,
    }
    if OIDC_CLIENT_SECRET:
        form["client_secret"] = OIDC_CLIENT_SECRET
    token_data = _http_json_request(token_endpoint, method="POST", data=form)
    id_token = str(token_data.get("id_token", "")).strip()
    if not id_token:
        raise RuntimeError("id_token fehlt in OIDC response")

    jwk_client = jwt.PyJWKClient(jwks_uri)
    signing_key = jwk_client.get_signing_key_from_jwt(id_token)
    claims = jwt.decode(
        id_token,
        signing_key.key,
        algorithms=["RS256"],
        audience=OIDC_CLIENT_ID,
        issuer=issuer,
    )

    user_email = _extract_email_from_claims(claims)
    if not user_email:
        raise PermissionError("OIDC claim preferred_username/upn/email fehlt")
    user_domain = user_email.split("@", 1)[1] if "@" in user_email else ""
    if OIDC_ALLOWED_USERS and user_email not in OIDC_ALLOWED_USERS:
        raise PermissionError("Benutzer ist nicht freigegeben")
    if OIDC_ALLOWED_DOMAINS and user_domain.lower() not in OIDC_ALLOWED_DOMAINS:
        raise PermissionError("E-Mail-Domain ist nicht freigegeben")

    groups = claims.get("groups", [])
    group_ids = {str(item) for item in groups if str(item).strip()} if isinstance(groups, list) else set()
    db_roles = _oidc_roles_from_app_users(db_dialect, db_target, user_email)
    if OIDC_BOOTSTRAP_ADMIN:
        _bootstrap_upsert_app_admin(db_dialect, db_target, user_email, str(claims.get("name") or user_email))
        roles = ["admin"]
    elif OIDC_ADMIN_GROUP_IDS:
        roles = ["admin"] if (group_ids & OIDC_ADMIN_GROUP_IDS) else ["user"]
    elif db_roles:
        roles = db_roles
    else:
        roles = ["user"]

    display_name = str(claims.get("name") or claims.get("preferred_username") or user_email).strip()
    subject = str(claims.get("sub", "")).strip() or user_email
    now_ts = now_unix()
    session_payload = {
        "typ": "portal-session",
        "sub": subject,
        "email": user_email,
        "name": display_name,
        "roles": roles,
        "iat": now_ts,
        "exp": now_ts + AUTH_SESSION_TTL_SECONDS,
        "iss": "kundenportal-api",
    }
    portal_token = make_auth_session_token(session_payload)
    return {
        "access_token": portal_token,
        "expires_in": AUTH_SESSION_TTL_SECONDS,
        "user": {
            "id": subject,
            "email": user_email,
            "display_name": display_name,
            "roles": roles,
        },
    }


def slugify(value: str) -> str:
    # Fold umlauts and accents to ASCII (as in normalize_label) so that e.g.
    # "Fürth" becomes "furth" instead of "f-rth".
    value = unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii")
    base = re.sub(r"[^a-z0-9]+", "-", value.lower()).strip("-")
    return base or "tenant"


def normalize_label(value: str) -> str:
    value = unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii")
    value = value.lower().replace("&", " und ")
    value = re.sub(r"[^a-z0-9]+", " ", value)
    return re.sub(r"\s+", " ", value).strip()


def normalize_text_list(value):
    if value is None:
        return []
    if isinstance(value, str):
        items = value.split(",")
    elif isinstance(value, list):
        items = value
    else:
        raise ValueError("ungueltiger Listentyp")
    result = []
    for item in items:
        text = str(item).strip()
        if text and text not in result:
            result.append(text)
    return result


def ensure_unique_slug(conn, desired_slug: str) -> str:
    slug = slugify(desired_slug)
    candidate = slug
    counter = 2
    while conn.execute("SELECT 1 FROM tenants WHERE tenant_slug = ?", (candidate,)).fetchone():
        candidate = f"{slug}-{counter}"
        counter += 1
    return candidate


def parse_percent(value: str):
    cleaned = value.strip().replace("%", "").replace(",", ".")
    if not cleaned:
        return None
    try:
        percent = float(cleaned)
    except ValueError:
        return None
    if percent < 0 or percent > 100:
        return None
    return percent


def status_from_percent(percent: float, target: float = 80.0) -> str:
    if percent >= target:
        return "done"
    if percent >= 60:
        return "inprogress"
    return "open"


def score_badge_by_percent(percent: float):
    if percent >= 80:
        return "good", "Stabil"
    if percent >= 60:
        return "warn", "Ausbau"
    return "bad", "Kritisch"


def infer_status_from_csv_value(raw_value: str, measure_key: str):
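    """Map a raw CSV cell to a portal status ('done', 'inprogress', ...) or None."""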
    value = raw_value.strip()
    if not value:
        return None

    lowered = value.lower()
    if measure_key == "internal-risk-level":
        if lowered == "i":
            return "done"
        if lowered == "ii":
            return "inprogress"
        if lowered == "iii":
            return "open"
        if lowered == "iv":
            return "open"

    if measure_key in {"m365-advisor-score-target", "domain-risk-level"}:
        percent = parse_percent(value)
        if percent is not None:
            return status_from_percent(percent)

    if lowered in CSV_STATUS_DONE:
        return "done"
    if lowered in CSV_STATUS_INPROGRESS:
        return "inprogress"
    if lowered in CSV_STATUS_OPEN:
        return "open"
    if lowered in CSV_STATUS_LOOP:
        return "loop"
    if lowered in CSV_STATUS_NA:
        return "na"
    return None


def upsert_measure_status(conn, tenant_id: int, measure_id: int, status: str) -> None:
    conn.execute(
        """
        INSERT INTO tenant_security_status (tenant_id, measure_id, status, updated_at)
        VALUES (?, ?, ?, CURRENT_TIMESTAMP)
        ON CONFLICT(tenant_id, measure_id)
        DO UPDATE SET status = excluded.status, updated_at = CURRENT_TIMESTAMP
        """,
        (tenant_id, measure_id, status),
    )


def update_m365_score_from_percent(conn, tenant_id: int, percent: float) -> None:
    row = conn.execute(
        """
        SELECT score_max, score_target, score_categories_json
        FROM tenant_overview
        WHERE tenant_id = ?
        """,
        (tenant_id,),
    ).fetchone()
    if not row:
        return

    score_max = int(row["score_max"])
    score_target = int(row["score_target"])
    score_current = int(round(score_max * (percent / 100.0)))

    try:
        categories = json.loads(row["score_categories_json"])
    except Exception:
        categories = []

    scaled_categories = []
    for category in categories:
        max_points = int(category.get("max", 0) or 0)
        points = int(round(max_points * (percent / 100.0)))
        badge_state, badge_label = score_badge_by_percent(percent)
        scaled_categories.append(
            {
                "name": category.get("name", "Category"),
                "points": points,
                "max": max_points,
                "state": badge_state,
                "label": badge_label,
            }
        )

    conn.execute(
        """
        UPDATE tenant_overview
        SET score_current = ?, score_target = ?, score_categories_json = ?, updated_at = CURRENT_TIMESTAMP
        WHERE tenant_id = ?
        """,
        (score_current, score_target, json.dumps(scaled_categories), tenant_id),
    )


def parse_security_csv_records(csv_path: Path):
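    """Read the security CSV and return one dict per customer row, keyed by CSV_SECURITY_HEADERS."""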
    if not csv_path.exists():
        return []

    with csv_path.open("r", encoding="utf-8-sig", newline="") as handle:
        reader = csv.reader(handle)
        rows = list(reader)

    records = []
    expected_columns = len(CSV_SECURITY_HEADERS)
    measure_column_count = expected_columns - 1

    for raw in rows[1:]:
        if not raw or not any(cell.strip() for cell in raw):
            continue

        normalized = list(raw)
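        # Extra cells usually mean the customer name contained unquoted commas;
        # fold the leading cells back into a single name.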
        if len(normalized) > expected_columns:
            name_parts = len(normalized) - measure_column_count
            customer_name = ", ".join(part.strip() for part in normalized[:name_parts] if part.strip())
            normalized = [customer_name] + normalized[name_parts:]
        elif len(normalized) < expected_columns:
            normalized.extend([""] * (expected_columns - len(normalized)))

        normalized = [cell.strip() for cell in normalized[:expected_columns]]
        records.append(dict(zip(CSV_SECURITY_HEADERS, normalized)))

    return records


def apply_security_csv_import(conn, csv_path: Path):
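    """Import the security CSV: match or create tenants, then upsert measure statuses."""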
    records = parse_security_csv_records(csv_path)
    if not records:
        return {"found": False, "records": 0, "mapped": 0, "created": 0, "updates": 0, "unknown_values": []}

    tenant_rows = conn.execute("SELECT id, tenant_name, tenant_slug FROM tenants").fetchall()
    tenant_index = {normalize_label(row["tenant_name"]): row for row in tenant_rows}
    measure_rows = conn.execute("SELECT id, measure_key FROM security_measures").fetchall()
    measure_ids = {row["measure_key"]: row["id"] for row in measure_rows}

    mapped = 0
    created = 0
    updates = 0
    unknown_values = set()

    for record in records:
        csv_name = record["Kunde"].strip()
        if not csv_name:
            continue

        lookup_name = normalize_label(csv_name)
        lookup_name = CSV_TENANT_NAME_ALIASES.get(lookup_name, lookup_name)
        tenant_row = tenant_index.get(lookup_name)

        if tenant_row is None:
            candidates = [row for key, row in tenant_index.items() if lookup_name and (lookup_name in key or key in lookup_name)]
            if len(candidates) == 1:
                tenant_row = candidates[0]

        if tenant_row is None:
            tenant_slug = ensure_unique_slug(conn, csv_name)
            tenant_id = insert_and_get_id(
                conn,
                "INSERT INTO tenants (tenant_slug, tenant_name) VALUES (?, ?)",
                (tenant_slug, csv_name),
            )
            seed_tenant_portal_content(conn, tenant_id, BASELINE_TENANT_SLUG)
            seed_tenant_security_status(conn, tenant_id)
            seed_tenant_users(conn, tenant_id, tenant_slug, csv_name)
            tenant_row = {"id": tenant_id, "tenant_name": csv_name, "tenant_slug": tenant_slug}
            tenant_index[normalize_label(csv_name)] = tenant_row
            created += 1

        tenant_id = int(tenant_row["id"])
        mapped += 1

        for column_name, measure_key in CSV_COLUMN_TO_MEASURE_KEY.items():
            raw_value = record.get(column_name, "").strip()
            if not raw_value:
                continue

            measure_id = measure_ids.get(measure_key)
            if measure_id is None:
                continue

            if measure_key == "m365-advisor-score-target":
                percent = parse_percent(raw_value)
                if percent is not None:
                    update_m365_score_from_percent(conn, tenant_id, percent)

            status = infer_status_from_csv_value(raw_value, measure_key)
            if status is None:
                unknown_values.add(f"{column_name}: {raw_value}")
                continue

            upsert_measure_status(conn, tenant_id, measure_id, status)
            updates += 1

    return {
        "found": True,
        "records": len(records),
        "mapped": mapped,
        "created": created,
        "updates": updates,
        "unknown_values": sorted(unknown_values),
    }


def seed_tenant_portal_content(conn, tenant_id: int, source_slug: str) -> None:
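    """Seed demo overview, contracts, inventory and advisor data for a tenant (falls back to the baseline tenant)."""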
    overview = PORTAL_OVERVIEWS.get(source_slug) or PORTAL_OVERVIEWS[BASELINE_TENANT_SLUG]
    score = overview["score"]
    kpis = overview["kpis"]
    mdm = overview["mdm"]
    conn.execute(
        """
        INSERT INTO tenant_overview (
          tenant_id, active_services, devices, open_tickets,
          score_current, score_max, score_benchmark, score_target, score_categories_json,
          mdm_compliance, mdm_unmanaged, mdm_policy_drift, mdm_last_sync, updated_at
        )
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
        ON CONFLICT(tenant_id) DO UPDATE SET
          active_services = excluded.active_services,
          devices = excluded.devices,
          open_tickets = excluded.open_tickets,
          score_current = excluded.score_current,
          score_max = excluded.score_max,
          score_benchmark = excluded.score_benchmark,
          score_target = excluded.score_target,
          score_categories_json = excluded.score_categories_json,
          mdm_compliance = excluded.mdm_compliance,
          mdm_unmanaged = excluded.mdm_unmanaged,
          mdm_policy_drift = excluded.mdm_policy_drift,
          mdm_last_sync = excluded.mdm_last_sync,
          updated_at = CURRENT_TIMESTAMP
        """,
        (
            tenant_id,
            kpis["active_services"],
            kpis["devices"],
            kpis["open_tickets"],
            score["current"],
            score["max"],
            score["benchmark"],
            score["target"],
            json.dumps(score["categories"]),
            mdm["compliance"],
            mdm["unmanaged"],
            mdm["policy_drift"],
            mdm["last_sync"],
        ),
    )

    conn.execute("DELETE FROM tenant_contracts WHERE tenant_id = ?", (tenant_id,))
    for sort_order, contract in enumerate(PORTAL_CONTRACTS.get(source_slug, PORTAL_CONTRACTS[BASELINE_TENANT_SLUG]), start=1):
        conn.execute(
            """
            INSERT INTO tenant_contracts (
              tenant_id, contract_id, service, package, sla, contract_no,
              status_class, status_label, runtime, description, scope_json, assets_json, sort_order
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                tenant_id,
                contract["contract_id"],
                contract["service"],
                contract["package"],
                contract["sla"],
                contract["contract_no"],
                contract["status_class"],
                contract["status_label"],
                contract["runtime"],
                contract["description"],
                json.dumps(contract["scope"]),
                json.dumps(contract["assets"]),
                sort_order,
            ),
        )

    conn.execute("DELETE FROM tenant_inventory WHERE tenant_id = ?", (tenant_id,))
    for sort_order, inventory in enumerate(PORTAL_INVENTORY.get(source_slug, PORTAL_INVENTORY[BASELINE_TENANT_SLUG]), start=1):
        conn.execute(
            """
            INSERT INTO tenant_inventory (
              tenant_id, hostname, type, source, user_name, patch, status, label, sort_order
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                tenant_id,
                inventory["hostname"],
                inventory["type"],
                inventory["source"],
                inventory["user"],
                inventory["patch"],
                inventory["status"],
                inventory["label"],
                sort_order,
            ),
        )

    conn.execute("DELETE FROM tenant_advisor_measures WHERE tenant_id = ?", (tenant_id,))
    advisor = PORTAL_ADVISOR.get(source_slug, PORTAL_ADVISOR[BASELINE_TENANT_SLUG])
    for pillar, entries in advisor.items():
        for sort_order, entry in enumerate(entries, start=1):
            conn.execute(
                """
                INSERT INTO tenant_advisor_measures (
                  tenant_id, pillar, title, state, impact, sort_order
                )
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (tenant_id, pillar, entry["title"], entry["state"], entry["impact"], sort_order),
            )


def seed_tenant_security_status(conn, tenant_id: int) -> None:
    conn.execute(
        """
        INSERT INTO tenant_security_status (tenant_id, measure_id, status, updated_at)
        SELECT ?, id, default_state, CURRENT_TIMESTAMP
        FROM security_measures
        ON CONFLICT(tenant_id, measure_id) DO NOTHING
        """,
        (tenant_id,),
    )


def seed_tenant_users(conn, tenant_id: int, tenant_slug: str, tenant_name: str) -> None:
    conn.execute("DELETE FROM tenant_users WHERE tenant_id = ?", (tenant_id,))
    users = TENANT_USERS.get(tenant_slug)
    if not users:
        email_prefix = slugify(tenant_slug).replace("-", ".")
        users = [(f"Admin {tenant_name}", f"admin@{email_prefix}.local", "Admin")]

    for display_name, email, role in users:
        conn.execute(
            """
            INSERT INTO tenant_users (tenant_id, display_name, email, role, is_active)
            VALUES (?, ?, ?, ?, 1)
            """,
            (tenant_id, display_name, email, role),
        )


def init_db(db_dialect: str, db_target: str, csv_path: Path) -> None:
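    """Create the database schema if missing and apply small additive migrations."""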
    conn = open_db_connection(db_dialect, db_target)
    id_column = "SERIAL PRIMARY KEY"

    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS tenants (
            id {id_column},
            tenant_slug TEXT NOT NULL UNIQUE,
            tenant_name TEXT NOT NULL,
            is_archived INTEGER NOT NULL DEFAULT 0,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    )
    tenant_columns = table_columns(conn, "tenants")
    if "is_archived" not in tenant_columns:
        conn.execute("ALTER TABLE tenants ADD COLUMN is_archived INTEGER NOT NULL DEFAULT 0")
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS security_measures (
            id {id_column},
            measure_key TEXT NOT NULL UNIQUE,
            area TEXT NOT NULL,
            title TEXT NOT NULL,
            default_state TEXT NOT NULL,
            text TEXT NOT NULL
        )
        """
    )
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS tenant_security_status (
            tenant_id INTEGER NOT NULL,
            measure_id INTEGER NOT NULL,
            status TEXT NOT NULL,
            updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            PRIMARY KEY (tenant_id, measure_id),
            FOREIGN KEY (tenant_id) REFERENCES tenants(id),
            FOREIGN KEY (measure_id) REFERENCES security_measures(id)
        )
        """
    )
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS tenant_overview (
            tenant_id INTEGER PRIMARY KEY,
            active_services INTEGER NOT NULL,
            devices INTEGER NOT NULL,
            open_tickets INTEGER NOT NULL,
            score_current INTEGER NOT NULL,
            score_max INTEGER NOT NULL,
            score_benchmark INTEGER NOT NULL,
            score_target INTEGER NOT NULL,
            score_categories_json TEXT NOT NULL,
            mdm_compliance TEXT NOT NULL,
            mdm_unmanaged TEXT NOT NULL,
            mdm_policy_drift TEXT NOT NULL,
            mdm_last_sync TEXT NOT NULL,
            updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS tenant_contracts (
            id {id_column},
            tenant_id INTEGER NOT NULL,
            contract_id TEXT NOT NULL,
            service TEXT NOT NULL,
            package TEXT NOT NULL,
            sla TEXT NOT NULL,
            contract_no TEXT NOT NULL,
            status_class TEXT NOT NULL,
            status_label TEXT NOT NULL,
            runtime TEXT NOT NULL,
            description TEXT NOT NULL,
            scope_json TEXT NOT NULL,
            assets_json TEXT NOT NULL,
            sort_order INTEGER NOT NULL DEFAULT 0,
            UNIQUE (tenant_id, contract_id),
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS tenant_inventory (
            id {id_column},
            tenant_id INTEGER NOT NULL,
            hostname TEXT NOT NULL,
            type TEXT NOT NULL,
            source TEXT NOT NULL,
            user_name TEXT NOT NULL,
            patch TEXT NOT NULL,
            status TEXT NOT NULL,
            label TEXT NOT NULL,
            sort_order INTEGER NOT NULL DEFAULT 0,
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS tenant_advisor_measures (
            id {id_column},
            tenant_id INTEGER NOT NULL,
            pillar TEXT NOT NULL,
            title TEXT NOT NULL,
            state TEXT NOT NULL,
            impact TEXT NOT NULL,
            sort_order INTEGER NOT NULL DEFAULT 0,
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS app_users (
            id {id_column},
            display_name TEXT NOT NULL,
            email TEXT NOT NULL UNIQUE,
            role TEXT NOT NULL,
            auth_provider TEXT NOT NULL DEFAULT 'oidc',
            is_active INTEGER NOT NULL DEFAULT 1,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    )
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS tenant_users (
            id {id_column},
            tenant_id INTEGER NOT NULL,
            display_name TEXT NOT NULL,
            email TEXT NOT NULL,
            role TEXT NOT NULL,
            is_active INTEGER NOT NULL DEFAULT 1,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            UNIQUE (tenant_id, email),
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS tenant_service_visibility (
            tenant_id INTEGER NOT NULL,
            service_id TEXT NOT NULL,
            is_hidden INTEGER NOT NULL DEFAULT 1,
            updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            PRIMARY KEY (tenant_id, service_id),
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS service_catalog_custom (
            service_id TEXT PRIMARY KEY,
            title TEXT NOT NULL,
            category TEXT NOT NULL,
            subcategory TEXT,
            portfolio TEXT NOT NULL,
            service_level TEXT,
            price_eur REAL,
            billing_cycle TEXT,
            offering_model TEXT NOT NULL DEFAULT 'component',
            scenario_tags_json TEXT NOT NULL DEFAULT '[]',
            scope_json TEXT NOT NULL DEFAULT '[]',
            description TEXT NOT NULL,
            details TEXT NOT NULL,
            contract_keywords_json TEXT NOT NULL,
            relevance_group TEXT,
            includes_json TEXT NOT NULL DEFAULT '[]',
            requires_json TEXT NOT NULL DEFAULT '[]',
            replaces_json TEXT NOT NULL DEFAULT '[]',
            gap_relevant INTEGER NOT NULL DEFAULT 1,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    )
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS tenant_managed_navigator (
            tenant_id INTEGER PRIMARY KEY,
            answers_json TEXT NOT NULL DEFAULT '{}',
            selected_bundle_ids_json TEXT NOT NULL DEFAULT '[]',
            recommended_bundle_ids_json TEXT NOT NULL DEFAULT '[]',
            optional_bundle_ids_json TEXT NOT NULL DEFAULT '[]',
            excluded_bundle_ids_json TEXT NOT NULL DEFAULT '[]',
            cart_items_json TEXT NOT NULL DEFAULT '[]',
            updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS tenant_managed_presentation_shares (
            share_id TEXT PRIMARY KEY,
            tenant_id INTEGER NOT NULL,
            navigator_snapshot_json TEXT NOT NULL,
            password_salt TEXT NOT NULL,
            password_hash TEXT NOT NULL,
            expires_at TEXT NOT NULL,
            is_single_use INTEGER NOT NULL DEFAULT 1,
            used_at TEXT,
            revoked_at TEXT,
            open_count INTEGER NOT NULL DEFAULT 0,
            last_opened_at TEXT,
            created_by TEXT,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        f"""
        CREATE TABLE IF NOT EXISTS tenant_managed_navigator_history (
            id {id_column},
            tenant_id INTEGER NOT NULL,
            event_type TEXT NOT NULL,
            event_label TEXT,
            snapshot_json TEXT NOT NULL DEFAULT '{{}}',
            share_id TEXT,
            created_by TEXT,
            created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (tenant_id) REFERENCES tenants(id)
        )
        """
    )
    conn.execute(
        """
        CREATE INDEX IF NOT EXISTS idx_managed_history_tenant_created
        ON tenant_managed_navigator_history (tenant_id, created_at DESC)
        """
    )

    managed_navigator_columns = table_columns(conn, "tenant_managed_navigator")
    managed_navigator_column_defs = {
        "cart_items_json": "TEXT NOT NULL DEFAULT '[]'",
    }
    for column_name, column_def in managed_navigator_column_defs.items():
        if column_name not in managed_navigator_columns:
            conn.execute(
                f"ALTER TABLE tenant_managed_navigator ADD COLUMN {column_name} {column_def}"
            )

    service_catalog_columns = table_columns(conn, "service_catalog_custom")
    service_catalog_column_defs = {
        "offering_model": "TEXT NOT NULL DEFAULT 'component'",
        "price_eur": "REAL",
        "billing_cycle": "TEXT",
        "scenario_tags_json": "TEXT NOT NULL DEFAULT '[]'",
        "scope_json": "TEXT NOT NULL DEFAULT '[]'",
        "includes_json": "TEXT NOT NULL DEFAULT '[]'",
        "requires_json": "TEXT NOT NULL DEFAULT '[]'",
        "replaces_json": "TEXT NOT NULL DEFAULT '[]'",
    }
    for column_name, column_def in service_catalog_column_defs.items():
        if column_name not in service_catalog_columns:
            conn.execute(
                f"ALTER TABLE service_catalog_custom ADD COLUMN {column_name} {column_def}"
            )

    conn.execute(
        """
        DELETE FROM tenant_security_status
        WHERE tenant_id NOT IN (SELECT id FROM tenants)
           OR measure_id NOT IN (SELECT id FROM security_measures)
        """
    )
    conn.execute("DELETE FROM tenant_overview WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_contracts WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_inventory WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_advisor_measures WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_users WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_service_visibility WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_managed_navigator WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_managed_presentation_shares WHERE tenant_id NOT IN (SELECT id FROM tenants)")
    conn.execute("DELETE FROM tenant_managed_navigator_history WHERE tenant_id NOT IN (SELECT id FROM tenants)")

    conn.executemany(
        """
        INSERT INTO tenants (tenant_slug, tenant_name)
        VALUES (?, ?)
        ON CONFLICT(tenant_slug) DO NOTHING
        """,
        TENANTS,
    )
    conn.executemany(
        """
        INSERT INTO security_measures (measure_key, area, title, default_state, text)
        VALUES (?, ?, ?, ?, ?)
        ON CONFLICT(measure_key) DO UPDATE SET
            area = excluded.area,
            title = excluded.title,
            default_state = excluded.default_state,
            text = excluded.text
        """,
        MEASURES,
    )
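    # Consolidate the legacy SPF/DKIM/DMARC measures into the combined
    # 'mail-auth-spf-dkim-dmarc' measure, carrying over the worst recorded status per tenant.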
    conn.execute(
        """
        WITH old_states AS (
            SELECT
                ts.tenant_id,
                MAX(CASE ts.status WHEN 'open' THEN 4 WHEN 'inprogress' THEN 3 WHEN 'done' THEN 2 WHEN 'loop' THEN 2 ELSE 1 END) AS severity
            FROM tenant_security_status ts
            JOIN security_measures sm ON sm.id = ts.measure_id
            WHERE sm.measure_key IN ('spf-mail-protection', 'dkim-mail-signing', 'dmarc-policy-enforcement')
            GROUP BY ts.tenant_id
        ),
        target AS (
            SELECT id AS measure_id FROM security_measures WHERE measure_key = 'mail-auth-spf-dkim-dmarc'
        )
        INSERT INTO tenant_security_status (tenant_id, measure_id, status, updated_at)
        SELECT
            old_states.tenant_id,
            target.measure_id,
            CASE old_states.severity
                WHEN 4 THEN 'open'
                WHEN 3 THEN 'inprogress'
                WHEN 2 THEN 'done'
                ELSE 'na'
            END AS status,
            CURRENT_TIMESTAMP
        FROM old_states
        CROSS JOIN target
        ON CONFLICT(tenant_id, measure_id)
        DO UPDATE SET status = excluded.status, updated_at = excluded.updated_at
        """
    )
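    # Remove measures (and their per-tenant statuses) that are no longer defined in MEASURES.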
    valid_measure_keys = tuple(measure[0] for measure in MEASURES)
    placeholders = ",".join("?" for _ in valid_measure_keys)
    # `placeholders` is generated from trusted in-process constants (`MEASURES`), not user input.
    cleanup_tenant_status_sql = """
        DELETE FROM tenant_security_status
        WHERE measure_id IN (
            SELECT id FROM security_measures
            WHERE measure_key NOT IN ({})
        )
        """.format(placeholders)  # nosec B608
    conn.execute(cleanup_tenant_status_sql, valid_measure_keys)
    # `placeholders` is generated from trusted in-process constants (`MEASURES`), not user input.
    conn.execute(
        f"DELETE FROM security_measures WHERE measure_key NOT IN ({placeholders})",  # nosec B608
        valid_measure_keys,
    )
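    # Backfill any missing tenant/measure combinations with the measure's default state.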
    conn.execute(
        """
        INSERT INTO tenant_security_status (tenant_id, measure_id, status, updated_at)
        SELECT t.id, m.id, m.default_state, CURRENT_TIMESTAMP
        FROM tenants t
        CROSS JOIN security_measures m
        ON CONFLICT(tenant_id, measure_id) DO NOTHING
        """
    )

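    # Pre-set selected measure statuses for the seeded tenants with ids 2 and 3.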
    conn.execute(
        """
        UPDATE tenant_security_status
        SET status = 'done', updated_at = CURRENT_TIMESTAMP
        WHERE tenant_id = 2
          AND measure_id IN (
            SELECT id FROM security_measures WHERE measure_key IN ('entra-ca-mfa-defaults', 'password-manager')
          )
        """
    )
    conn.execute(
        """
        UPDATE tenant_security_status
        SET status = 'open', updated_at = CURRENT_TIMESTAMP
        WHERE tenant_id = 3
          AND measure_id IN (
            SELECT id FROM security_measures WHERE measure_key IN ('nis2-regulatory-check', 'incident-response-playbooks')
          )
        """
    )

    tenant_rows = conn.execute("SELECT id, tenant_slug, tenant_name FROM tenants").fetchall()
    for tenant_row in tenant_rows:
        seed_tenant_portal_content(conn, int(tenant_row["id"]), tenant_row["tenant_slug"])
        seed_tenant_security_status(conn, int(tenant_row["id"]))
        seed_tenant_users(conn, int(tenant_row["id"]), tenant_row["tenant_slug"], tenant_row["tenant_name"])

    csv_summary = apply_security_csv_import(conn, csv_path)
    if csv_summary["found"]:
        print(
            "[kundenportal-api] security.csv import:"
            f" records={csv_summary['records']}, mapped={csv_summary['mapped']},"
            f" created={csv_summary['created']}, updates={csv_summary['updates']}"
        )
        if csv_summary["unknown_values"]:
            print(
                "[kundenportal-api] security.csv unresolved values:"
                f" {', '.join(csv_summary['unknown_values'][:8])}"
            )

    conn.commit()
    conn.close()


class PortalApiHandler(SimpleHTTPRequestHandler):
    db_dialect: str = "postgres"
    db_target: str = ""

    def _is_allowed_origin(self, origin: str) -> bool:
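        """Check the Origin header against the configured allow-list, or the dev defaults in development."""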
        candidate = str(origin or "").strip()
        if not candidate:
            return False
        if _CORS_ALLOW_ORIGINS_ENV:
            configured = {item.strip() for item in _CORS_ALLOW_ORIGINS_ENV.split(",") if item.strip()}
            if "*" in configured:
                return True
            return candidate in configured
        if APP_ENV in {"development", "dev", "local"}:
            return candidate in DEFAULT_DEV_CORS_ALLOW_ORIGINS
        return False

    def end_headers(self) -> None:
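        """Attach CORS and security headers (plus HSTS when behind an HTTPS proxy) to every response."""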
        origin = str(self.headers.get("Origin", "")).strip()
        if self._is_allowed_origin(origin):
            self.send_header("Access-Control-Allow-Origin", origin)
            self.send_header("Vary", "Origin")
        self.send_header("Access-Control-Allow-Methods", "GET, POST, PUT, PATCH, OPTIONS")
        self.send_header("Access-Control-Allow-Headers", "Content-Type, Authorization")
        self.send_header("X-Content-Type-Options", "nosniff")
        self.send_header("X-Frame-Options", "DENY")
        self.send_header("Referrer-Policy", "strict-origin-when-cross-origin")
        self.send_header("Permissions-Policy", "geolocation=(), microphone=(), camera=()")
        forwarded_proto = str(self.headers.get("X-Forwarded-Proto", "")).strip().lower()
        if forwarded_proto == "https":
            self.send_header("Strict-Transport-Security", "max-age=31536000; includeSubDomains")
        super().end_headers()

    def _json(self, payload, status=200) -> None:
        body = json.dumps(payload).encode("utf-8")
        self.send_response(status)
        self.send_header("Content-Type", "application/json; charset=utf-8")
        self.send_header("Content-Length", str(len(body)))
        self.send_header("Cache-Control", "no-store")
        self.end_headers()
        self.wfile.write(body)

    def _read_json(self):
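        """Parse the JSON request body, rejecting payloads larger than MAX_REQUEST_BODY_BYTES."""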
        length = int(self.headers.get("Content-Length", "0"))
        if length > MAX_REQUEST_BODY_BYTES:
            raise ValueError(f"payload zu gross (max {MAX_REQUEST_BODY_BYTES} bytes)")
        raw = self.rfile.read(length) if length else b"{}"
        return json.loads(raw.decode("utf-8") or "{}")

    def _request_base_url(self) -> str:
        host = str(self.headers.get("Host", "127.0.0.1:8000")).strip() or "127.0.0.1:8000"
        if not re.fullmatch(r"[A-Za-z0-9\.\-:]+", host):
            host = "127.0.0.1:8000"
        forwarded_proto = str(self.headers.get("X-Forwarded-Proto", "")).strip().lower()
        scheme = forwarded_proto if forwarded_proto in {"http", "https"} else "http"
        return f"{scheme}://{host}"

    def _connect(self):
        return open_db_connection(self.db_dialect, self.db_target)

    def _bearer_token_from_request(self) -> str:
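        """Return the bearer token from the Authorization header, falling back to the access_token query parameter."""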
        auth_header = str(self.headers.get("Authorization", "")).strip()
        if auth_header.lower().startswith("bearer "):
            return auth_header[7:].strip()
        parsed = urlparse(self.path)
        token = str(parse_qs(parsed.query).get("access_token", [""])[0]).strip()
        return token

    def _authenticated_session(self):
        token = self._bearer_token_from_request()
        if not token:
            return None
        return verify_auth_session_token(token)

    @staticmethod
    def _session_roles(session) -> set[str]:
        if not isinstance(session, dict):
            return set()
        roles = session.get("roles", [])
        if not isinstance(roles, list):
            return set()
        return {str(role).strip().lower() for role in roles if str(role).strip()}

    def _require_admin_session(self):
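        """Return the admin session, or send a 401/403 response and return None (bypassed when RBAC is disabled)."""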
        if not RBAC_ENFORCED:
            return {"roles": ["admin"], "sub": "rbac-disabled"}
        session = self._authenticated_session()
        if not session:
            self._json({"error": "nicht authentifiziert"}, status=401)
            return None
        if "admin" not in self._session_roles(session):
            self._json({"error": "keine ausreichenden Berechtigungen"}, status=403)
            return None
        return session

    @staticmethod
    def _tenant_id_from_query(parsed):
        tenant_id = parse_qs(parsed.query).get("tenant_id", [None])[0]
        if tenant_id is None:
            return None
        try:
            return int(tenant_id)
        except ValueError:
            return None

    def _get_tenant_name(self, conn, tenant_id: int, include_archived: bool = False):
        query = "SELECT tenant_name FROM tenants WHERE id = ?"
        if not include_archived:
            query += " AND is_archived = 0"
        row = conn.execute(query, (tenant_id,)).fetchone()
        return row["tenant_name"] if row else None

    def _get_overview(self, conn, tenant_id: int):
        row = conn.execute(
            """
            SELECT
              active_services, devices, open_tickets,
              score_current, score_max, score_benchmark, score_target, score_categories_json,
              mdm_compliance, mdm_unmanaged, mdm_policy_drift, mdm_last_sync
            FROM tenant_overview
            WHERE tenant_id = ?
            """,
            (tenant_id,),
        ).fetchone()
        if not row:
            return None
        return {
            "kpis": {
                "active_services": row["active_services"],
                "devices": row["devices"],
                "open_tickets": row["open_tickets"],
            },
            "score": {
                "current": row["score_current"],
                "max": row["score_max"],
                "benchmark": row["score_benchmark"],
                "target": row["score_target"],
                "categories": json.loads(row["score_categories_json"]),
            },
            "mdm": {
                "compliance": row["mdm_compliance"],
                "unmanaged": row["mdm_unmanaged"],
                "policy_drift": row["mdm_policy_drift"],
                "last_sync": row["mdm_last_sync"],
            },
        }

    def _get_contracts(self, conn, tenant_id: int):
        rows = conn.execute(
            """
            SELECT
              contract_id, service, package, sla, contract_no,
              status_class, status_label, runtime, description, scope_json, assets_json
            FROM tenant_contracts
            WHERE tenant_id = ?
            ORDER BY sort_order
            """,
            (tenant_id,),
        ).fetchall()
        return [
            {
                "id": row["contract_id"],
                "service": row["service"],
                "package": row["package"],
                "sla": row["sla"],
                "contractNo": row["contract_no"],
                "statusClass": row["status_class"],
                "statusLabel": row["status_label"],
                "runtime": row["runtime"],
                "description": row["description"],
                "scope": json.loads(row["scope_json"]),
                "assets": json.loads(row["assets_json"]),
            }
            for row in rows
        ]

    def _get_inventory(self, conn, tenant_id: int):
        rows = conn.execute(
            """
            SELECT hostname, type, source, user_name, patch, status, label
            FROM tenant_inventory
            WHERE tenant_id = ?
            ORDER BY sort_order
            """,
            (tenant_id,),
        ).fetchall()
        return [
            {
                "hostname": row["hostname"],
                "type": row["type"],
                "source": row["source"],
                "user": row["user_name"],
                "patch": row["patch"],
                "status": row["status"],
                "label": row["label"],
            }
            for row in rows
        ]

    def _get_advisor(self, conn, tenant_id: int, pillar=None):
        params = [tenant_id]
        query = """
            SELECT pillar, title, state, impact
            FROM tenant_advisor_measures
            WHERE tenant_id = ?
        """
        if pillar:
            query += " AND pillar = ?"
            params.append(pillar)
        query += " ORDER BY pillar, sort_order"
        rows = conn.execute(query, tuple(params)).fetchall()

        if pillar:
            return [
                {"title": row["title"], "state": row["state"], "impact": row["impact"]}
                for row in rows
            ]

        grouped = {"identity": [], "device": [], "data": [], "apps": []}
        for row in rows:
            grouped.setdefault(row["pillar"], []).append(
                {"title": row["title"], "state": row["state"], "impact": row["impact"]}
            )
        return grouped

    def _get_users(self, conn, tenant_id: int):
        rows = conn.execute(
            """
            SELECT id, tenant_id, display_name, email, role, is_active, created_at
            FROM tenant_users
            WHERE tenant_id = ?
            ORDER BY LOWER(display_name), display_name
            """,
            (tenant_id,),
        ).fetchall()
        return [
            {
                "id": row["id"],
                "tenant_id": row["tenant_id"],
                "display_name": row["display_name"],
                "email": row["email"],
                "role": row["role"],
                "is_active": bool(row["is_active"]),
                "created_at": row["created_at"],
            }
            for row in rows
        ]

    def _get_app_users(self, conn):
        rows = conn.execute(
            """
            SELECT id, display_name, email, role, auth_provider, is_active, created_at
            FROM app_users
            ORDER BY LOWER(display_name), display_name
            """
        ).fetchall()
        return [
            {
                "id": row["id"],
                "display_name": row["display_name"],
                "email": row["email"],
                "role": row["role"],
                "auth_provider": row["auth_provider"],
                "is_active": bool(row["is_active"]),
                "created_at": row["created_at"],
            }
            for row in rows
        ]

    def _get_service_visibility(self, conn, tenant_id: int):
        rows = conn.execute(
            """
            SELECT service_id, is_hidden, updated_at
            FROM tenant_service_visibility
            WHERE tenant_id = ?
            ORDER BY LOWER(service_id), service_id
            """,
            (tenant_id,),
        ).fetchall()
        hidden_service_ids = [row["service_id"] for row in rows if int(row["is_hidden"]) == 1]
        return {
            "tenant_id": tenant_id,
            "hidden_service_ids": hidden_service_ids,
            "entries": [
                {
                    "service_id": row["service_id"],
                    "is_hidden": bool(row["is_hidden"]),
                    "updated_at": row["updated_at"],
                }
                for row in rows
            ],
        }

    def _get_service_catalog_custom(self, conn):
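        """Return all custom catalog entries, with the JSON list columns parsed defensively."""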
        rows = conn.execute(
            """
            SELECT
              service_id,
              title,
              category,
              subcategory,
              portfolio,
              service_level,
              price_eur,
              billing_cycle,
              offering_model,
              scenario_tags_json,
              scope_json,
              description,
              details,
              contract_keywords_json,
              relevance_group,
              includes_json,
              requires_json,
              replaces_json,
              gap_relevant,
              created_at
            FROM service_catalog_custom
            ORDER BY LOWER(title), title
            """
        ).fetchall()
        def parse_string_list(raw):
            # Defensive parse: malformed JSON or non-list values fall back to an empty list.
            try:
                parsed = json.loads(raw or "[]")
            except Exception:
                return []
            if not isinstance(parsed, list):
                return []
            return [str(item).strip() for item in parsed if str(item).strip()]

        result = []
        for row in rows:
            keywords = parse_string_list(row["contract_keywords_json"])
            scenario_tags = parse_string_list(row["scenario_tags_json"])
            scope = parse_string_list(row["scope_json"])
            includes = parse_string_list(row["includes_json"])
            requires = parse_string_list(row["requires_json"])
            replaces = parse_string_list(row["replaces_json"])
            result.append(
                {
                    "id": row["service_id"],
                    "title": row["title"],
                    "category": row["category"],
                    "subcategory": row["subcategory"],
                    "portfolio": row["portfolio"],
                    "serviceLevel": row["service_level"],
                    "priceEur": row["price_eur"],
                    "billingCycle": row["billing_cycle"],
                    "offeringModel": row["offering_model"],
                    "scenarioTags": scenario_tags,
                    "scope": scope,
                    "description": row["description"],
                    "details": row["details"],
                    "contractKeywords": keywords,
                    "relevanceGroup": row["relevance_group"],
                    "includes": includes,
                    "requires": requires,
                    "replaces": replaces,
                    "gapRelevant": bool(row["gap_relevant"]),
                    "isCustom": True,
                    "created_at": row["created_at"],
                }
            )
        return result

    @staticmethod
    def _default_managed_navigator_payload(tenant_id: int):
        return {
            "tenant_id": tenant_id,
            "answers": {},
            "selected_bundle_ids": [],
            "recommended_bundle_ids": [],
            "optional_bundle_ids": [],
            "excluded_bundle_ids": [],
            "cart_items": [],
            "updated_at": None,
        }

    @staticmethod
    def _normalize_string_list(value):
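        """Return a de-duplicated list of trimmed, non-empty strings, rejecting entries longer than 120 characters."""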
        if value is None:
            return []
        if not isinstance(value, list):
            raise ValueError("listenfeld muss Array sein")
        cleaned = []
        for entry in value:
            text = str(entry).strip()
            if not text:
                continue
            if len(text) > 120:
                raise ValueError("listeneintrag zu lang (max 120)")
            cleaned.append(text)
        deduped = []
        seen = set()
        for entry in cleaned:
            if entry in seen:
                continue
            deduped.append(entry)
            seen.add(entry)
        return deduped

    @staticmethod
    def _normalize_managed_answers(value):
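        """Validate the answers mapping: enforce key/value length limits and coerce the *_count fields to non-negative integers."""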
        if value is None:
            return {}
        if not isinstance(value, dict):
            raise ValueError("answers muss Objekt sein")
        normalized = {}
        for key, field_value in value.items():
            name = str(key).strip()
            if not name:
                continue
            if len(name) > 80:
                raise ValueError("answer key zu lang (max 80)")
            if isinstance(field_value, bool):
                if name in {"device_count", "mainpc_count", "mailbox_count"}:
                    normalized[name] = 1 if field_value else 0
                else:
                    normalized[name] = field_value
                continue
            if isinstance(field_value, (int, float)):
                if name in {"device_count", "mainpc_count", "mailbox_count"}:
                    normalized[name] = max(0, int(field_value))
                else:
                    normalized[name] = field_value
                continue
            text = str(field_value).strip()
            if len(text) > 200:
                raise ValueError("answer value zu lang (max 200)")
            if name in {"device_count", "mainpc_count", "mailbox_count"}:
                try:
                    normalized[name] = max(0, int(float(text)))
                except (TypeError, ValueError):
                    normalized[name] = 0
            else:
                normalized[name] = text
        return normalized

    @staticmethod
    def _normalize_cart_items(value):
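        """Validate cart entries: unique service_id, source_type of bundle/addon, non-negative quantity."""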
        if value is None:
            return []
        if not isinstance(value, list):
            raise ValueError("cart_items muss Array sein")
        normalized = []
        seen = set()
        for entry in value:
            if not isinstance(entry, dict):
                continue
            service_id = str(entry.get("service_id", "")).strip()
            if not service_id or service_id in seen:
                continue
            if len(service_id) > 120:
                raise ValueError("cart_items.service_id zu lang (max 120)")
            source_type = str(entry.get("source_type", "bundle")).strip().lower() or "bundle"
            if source_type not in {"bundle", "addon"}:
                raise ValueError("cart_items.source_type muss bundle oder addon sein")
            try:
                quantity = int(entry.get("quantity", 0))
            except (TypeError, ValueError):
                quantity = 0
            if quantity < 0:
                raise ValueError("cart_items.quantity darf nicht negativ sein")
            normalized.append(
                {
                    "service_id": service_id,
                    "source_type": source_type,
                    "quantity": quantity,
                }
            )
            seen.add(service_id)
        return normalized

    @staticmethod
    def _normalize_managed_presentation_snapshot(value):
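        """Sanitize a presentation snapshot: trim and length-cap its fields, normalize answers, and de-duplicate bundles by id."""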
        if not isinstance(value, dict):
            raise ValueError("snapshot muss Objekt sein")
        tenant_name = str(value.get("tenant_name", "")).strip()
        profile_text = str(value.get("profile_text", "")).strip()
        answers = PortalApiHandler._normalize_managed_answers(value.get("answers", {}))
        bundles_raw = value.get("bundles", [])
        if not isinstance(bundles_raw, list):
            raise ValueError("snapshot.bundles muss Array sein")
        bundles = []
        seen_bundle_ids = set()
        for entry in bundles_raw:
            if not isinstance(entry, dict):
                continue
            bundle_id = str(entry.get("id", "")).strip()
            if not bundle_id or bundle_id in seen_bundle_ids:
                continue
            name = str(entry.get("name", "")).strip()
            category = str(entry.get("category", "")).strip()
            service_level = str(entry.get("serviceLevel", "")).strip()
            summary = str(entry.get("summary", "")).strip()
            contract_terms = str(entry.get("contractTerms", "")).strip()
            problem = str(entry.get("problem", "")).strip()
            response = str(entry.get("response", "")).strip()
            details = entry.get("details", [])
            if not isinstance(details, list):
                details = []
            cleaned_details = [str(item).strip() for item in details if str(item).strip()][:8]
            try:
                quantity = int(entry.get("quantity", 0))
            except (TypeError, ValueError):
                quantity = 0
            quantity = max(0, quantity)
            bundles.append(
                {
                    "id": bundle_id[:120],
                    "name": name[:220],
                    "category": category[:120],
                    "serviceLevel": service_level[:80],
                    "summary": summary[:800],
                    "details": cleaned_details,
                    "contractTerms": contract_terms[:800],
                    "quantity": quantity,
                    "problem": problem[:800],
                    "response": response[:800],
                }
            )
            seen_bundle_ids.add(bundle_id)

        return {
            "tenant_name": tenant_name[:180],
            "answers": answers,
            "profile_text": profile_text[:1200],
            "bundles": bundles,
            "generated_at": now_iso(),
        }

    @staticmethod
    def _normalize_managed_history_snapshot(value):
        if not isinstance(value, dict):
            raise ValueError("snapshot muss Objekt sein")
        answers = PortalApiHandler._normalize_managed_answers(value.get("answers", {}))
        selected_bundle_ids = PortalApiHandler._normalize_string_list(value.get("selected_bundle_ids", []))
        recommended_bundle_ids = PortalApiHandler._normalize_string_list(value.get("recommended_bundle_ids", []))
        optional_bundle_ids = PortalApiHandler._normalize_string_list(value.get("optional_bundle_ids", []))
        excluded_bundle_ids = PortalApiHandler._normalize_string_list(value.get("excluded_bundle_ids", []))
        cart_items = PortalApiHandler._normalize_cart_items(value.get("cart_items", []))
        generated_at = str(value.get("generated_at", "")).strip()[:64] or now_iso()
        return {
            "answers": answers,
            "selected_bundle_ids": selected_bundle_ids,
            "recommended_bundle_ids": recommended_bundle_ids,
            "optional_bundle_ids": optional_bundle_ids,
            "excluded_bundle_ids": excluded_bundle_ids,
            "cart_items": cart_items,
            "generated_at": generated_at,
        }

    @staticmethod
    def _normalize_managed_history_event_type(value):
        event_type = str(value or "").strip().lower()
        if event_type not in {"offer", "presentation", "manual"}:
            raise ValueError("event_type muss offer, presentation oder manual sein")
        return event_type

    def _create_managed_navigator_history_entry(
        self,
        conn,
        tenant_id: int,
        event_type: str,
        snapshot: dict,
        event_label: str = "",
        share_id: str = "",
        created_by: str = "",
    ):
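        """Write a normalized history entry for the tenant and return its id."""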
        normalized_snapshot = self._normalize_managed_history_snapshot(snapshot)
        normalized_event_type = self._normalize_managed_history_event_type(event_type)
        label = str(event_label or "").strip()[:220] or None
        normalized_share_id = str(share_id or "").strip()[:120] or None
        normalized_created_by = str(created_by or "").strip()[:180] or None
        values = (
            tenant_id,
            normalized_event_type,
            label,
            json.dumps(normalized_snapshot),
            normalized_share_id,
            normalized_created_by,
        )
        row = conn.execute(
            """
            INSERT INTO tenant_managed_navigator_history (
                tenant_id, event_type, event_label, snapshot_json, share_id, created_by, created_at
            )
            VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            RETURNING id
            """,
            values,
        ).fetchone()
        return int(row["id"])

    def _history_snapshot_from_managed_navigator_payload(self, payload):
        if not isinstance(payload, dict):
            return self._normalize_managed_history_snapshot({})
        return self._normalize_managed_history_snapshot(
            {
                "answers": payload.get("answers", {}),
                "selected_bundle_ids": payload.get("selected_bundle_ids", []),
                "recommended_bundle_ids": payload.get("recommended_bundle_ids", []),
                "optional_bundle_ids": payload.get("optional_bundle_ids", []),
                "excluded_bundle_ids": payload.get("excluded_bundle_ids", []),
                "cart_items": payload.get("cart_items", []),
                "generated_at": now_iso(),
            }
        )

    def _history_snapshot_from_presentation_snapshot(self, snapshot):
        if not isinstance(snapshot, dict):
            return self._normalize_managed_history_snapshot({})
        bundles = snapshot.get("bundles", [])
        if not isinstance(bundles, list):
            bundles = []
        selected_bundle_ids = []
        cart_items = []
        for entry in bundles:
            if not isinstance(entry, dict):
                continue
            bundle_id = str(entry.get("id", "")).strip()
            if not bundle_id:
                continue
            selected_bundle_ids.append(bundle_id)
            try:
                quantity = int(entry.get("quantity", 0))
            except (TypeError, ValueError):
                quantity = 0
            cart_items.append(
                {
                    "service_id": bundle_id,
                    "source_type": "bundle",
                    "quantity": max(0, quantity),
                }
            )
        return self._normalize_managed_history_snapshot(
            {
                "answers": snapshot.get("answers", {}),
                "selected_bundle_ids": selected_bundle_ids,
                "recommended_bundle_ids": selected_bundle_ids,
                "optional_bundle_ids": [],
                "excluded_bundle_ids": [],
                "cart_items": cart_items,
                "generated_at": now_iso(),
            }
        )

    def _get_managed_navigator_history(self, conn, tenant_id: int, limit: int = 30):
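        """Return up to `limit` (clamped to 1-100) history entries for the tenant, newest first."""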
        try:
            normalized_limit = int(limit)
        except (TypeError, ValueError):
            normalized_limit = 30
        normalized_limit = max(1, min(100, normalized_limit))
        rows = conn.execute(
            """
            SELECT id, tenant_id, event_type, event_label, snapshot_json, share_id, created_by, created_at
            FROM tenant_managed_navigator_history
            WHERE tenant_id = ?
            ORDER BY created_at DESC, id DESC
            LIMIT ?
            """,
            (tenant_id, normalized_limit),
        ).fetchall()
        items = []
        for row in rows:
            try:
                snapshot = self._normalize_managed_history_snapshot(json.loads(row["snapshot_json"] or "{}"))
            except Exception:
                snapshot = self._normalize_managed_history_snapshot({})
            items.append(
                {
                    "id": row["id"],
                    "tenant_id": row["tenant_id"],
                    "event_type": row["event_type"],
                    "event_label": row["event_label"],
                    "snapshot": snapshot,
                    "share_id": row["share_id"],
                    "created_by": row["created_by"],
                    "created_at": row["created_at"],
                }
            )
        return {
            "tenant_id": tenant_id,
            "items": items,
        }

    @staticmethod
    def _normalize_share_ttl_days(value):
        try:
            ttl_days = int(value)
        except (TypeError, ValueError):
            ttl_days = 7
        ttl_days = max(1, min(MANAGED_PRESENTATION_MAX_TTL_DAYS, ttl_days))
        return ttl_days

    def _create_managed_presentation_share(self, conn, payload):
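        """Create a password-protected presentation share, log a history entry, and return the share metadata."""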
        tenant_id = payload.get("tenant_id")
        password = payload.get("password")
        snapshot_raw = payload.get("snapshot")
        ttl_days = self._normalize_share_ttl_days(payload.get("ttl_days", 7))
        single_use = bool(payload.get("single_use", True))
        created_by = str(payload.get("created_by", "")).strip()[:180] or None

        if not isinstance(tenant_id, int):
            raise ValueError("tenant_id muss Integer sein")
        if not isinstance(password, str) or len(password.strip()) < 8:
            raise ValueError("password muss mindestens 8 Zeichen haben")
        snapshot = self._normalize_managed_presentation_snapshot(snapshot_raw)

        if not self._get_tenant_name(conn, tenant_id, include_archived=True):
            raise LookupError("tenant nicht gefunden")

        share_id = make_share_id()
        while conn.execute(
            "SELECT 1 FROM tenant_managed_presentation_shares WHERE share_id = ?",
            (share_id,),
        ).fetchone():
            share_id = make_share_id()

        expires_at_dt = datetime.now(tz=timezone.utc).replace(microsecond=0) + timedelta(days=ttl_days)
        expires_at = expires_at_dt.isoformat()
        salt = make_password_salt()
        password_hash = hash_password_with_salt(salt, password.strip())
        conn.execute(
            """
            INSERT INTO tenant_managed_presentation_shares (
                share_id, tenant_id, navigator_snapshot_json, password_salt, password_hash,
                expires_at, is_single_use, created_by, created_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            """,
            (
                share_id,
                tenant_id,
                json.dumps(snapshot),
                salt,
                password_hash,
                expires_at,
                1 if single_use else 0,
                created_by,
            ),
        )
        history_snapshot = self._history_snapshot_from_presentation_snapshot(snapshot_raw)
        self._create_managed_navigator_history_entry(
            conn,
            tenant_id,
            "presentation",
            history_snapshot,
            event_label="Kundenpraesentation erstellt",
            share_id=share_id,
            created_by=created_by or "",
        )
        conn.commit()
        return {
            "share_id": share_id,
            "tenant_id": tenant_id,
            "expires_at": expires_at,
            "single_use": single_use,
        }

    def _authenticate_managed_presentation_share(self, conn, share_id: str, password: str):
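        """Verify revocation, expiry, single-use state, and password, then return a signed presentation token."""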
        row = conn.execute(
            """
            SELECT share_id, password_salt, password_hash, expires_at, is_single_use, used_at, revoked_at
            FROM tenant_managed_presentation_shares
            WHERE share_id = ?
            """,
            (share_id,),
        ).fetchone()
        if not row:
            raise LookupError("share nicht gefunden")
        if row["revoked_at"]:
            raise PermissionError("link widerrufen")
        try:
            expires_at_dt = datetime.fromisoformat(str(row["expires_at"]))
            # Treat naive timestamps as UTC, mirroring the handling in the listing/extension helpers.
            if expires_at_dt.tzinfo is None:
                expires_at_dt = expires_at_dt.replace(tzinfo=timezone.utc)
        except Exception:
            raise PermissionError("link abgelaufen")
        if expires_at_dt < datetime.now(tz=timezone.utc):
            raise PermissionError("link abgelaufen")
        if int(row["is_single_use"] or 0) == 1 and row["used_at"]:
            raise PermissionError("link bereits verwendet")
        if not verify_password_hash(str(row["password_salt"]), str(password or ""), str(row["password_hash"])):
            raise PermissionError("passwort ungueltig")

        token_payload = {
            "typ": "managed-presentation",
            "share_id": row["share_id"],
            "iat": now_unix(),
            "exp": now_unix() + MANAGED_PRESENTATION_TOKEN_TTL_SECONDS,
        }
        return sign_managed_presentation_token(token_payload)

    def _view_managed_presentation_share(self, conn, share_id: str, token: str):
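        """Validate the presentation token, return the stored snapshot, and record the open (consuming single-use links)."""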
        token_payload = verify_managed_presentation_token(token)
        if not token_payload or str(token_payload.get("share_id", "")) != share_id:
            raise PermissionError("ungueltiger token")

        row = conn.execute(
            """
            SELECT
              share_id, tenant_id, navigator_snapshot_json, expires_at,
              is_single_use, used_at, revoked_at, open_count, last_opened_at
            FROM tenant_managed_presentation_shares
            WHERE share_id = ?
            """,
            (share_id,),
        ).fetchone()
        if not row:
            raise LookupError("share nicht gefunden")
        if row["revoked_at"]:
            raise PermissionError("link widerrufen")
        try:
            expires_at_dt = datetime.fromisoformat(str(row["expires_at"]))
            # Treat naive timestamps as UTC, mirroring the handling in the listing/extension helpers.
            if expires_at_dt.tzinfo is None:
                expires_at_dt = expires_at_dt.replace(tzinfo=timezone.utc)
        except Exception:
            raise PermissionError("link abgelaufen")
        if expires_at_dt < datetime.now(tz=timezone.utc):
            raise PermissionError("link abgelaufen")
        if int(row["is_single_use"] or 0) == 1 and row["used_at"]:
            raise PermissionError("link bereits verwendet")

        try:
            snapshot = json.loads(row["navigator_snapshot_json"] or "{}")
        except Exception:
            snapshot = {}

        now_stamp = now_iso()
        conn.execute(
            """
            UPDATE tenant_managed_presentation_shares
            SET
              open_count = open_count + 1,
              last_opened_at = ?,
              used_at = CASE WHEN is_single_use = 1 AND used_at IS NULL THEN ? ELSE used_at END
            WHERE share_id = ?
            """,
            (now_stamp, now_stamp, share_id),
        )
        conn.commit()

        return {
            "share_id": share_id,
            "tenant_id": row["tenant_id"],
            "expires_at": row["expires_at"],
            "single_use": bool(row["is_single_use"]),
            "snapshot": snapshot,
        }

    def _revoke_managed_presentation_share(self, conn, share_id: str):
        row = conn.execute(
            "SELECT share_id FROM tenant_managed_presentation_shares WHERE share_id = ?",
            (share_id,),
        ).fetchone()
        if not row:
            raise LookupError("share nicht gefunden")
        now_stamp = now_iso()
        conn.execute(
            "UPDATE tenant_managed_presentation_shares SET revoked_at = ? WHERE share_id = ?",
            (now_stamp, share_id),
        )
        conn.commit()
        return {"share_id": share_id, "revoked_at": now_stamp}

    def _list_managed_presentation_shares(self, conn, tenant_id: int, limit: int = 50):
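        """List the tenant's shares (newest first) with a derived status: active, expired, used, or revoked."""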
        try:
            normalized_limit = int(limit)
        except (TypeError, ValueError):
            normalized_limit = 50
        normalized_limit = max(1, min(200, normalized_limit))
        rows = conn.execute(
            """
            SELECT
              share_id,
              tenant_id,
              expires_at,
              is_single_use,
              used_at,
              revoked_at,
              open_count,
              last_opened_at,
              created_by,
              created_at
            FROM tenant_managed_presentation_shares
            WHERE tenant_id = ?
            ORDER BY created_at DESC
            LIMIT ?
            """,
            (tenant_id, normalized_limit),
        ).fetchall()
        now_dt = datetime.now(tz=timezone.utc)
        items = []
        for row in rows:
            expires_at_raw = str(row["expires_at"] or "").strip()
            status = "active"
            try:
                expires_dt = datetime.fromisoformat(expires_at_raw)
                if expires_dt.tzinfo is None:
                    expires_dt = expires_dt.replace(tzinfo=timezone.utc)
                if expires_dt < now_dt:
                    status = "expired"
            except Exception:
                status = "expired"
            if row["revoked_at"]:
                status = "revoked"
            elif int(row["is_single_use"] or 0) == 1 and row["used_at"]:
                status = "used"
            items.append(
                {
                    "share_id": row["share_id"],
                    "tenant_id": row["tenant_id"],
                    "expires_at": row["expires_at"],
                    "single_use": bool(row["is_single_use"]),
                    "used_at": row["used_at"],
                    "revoked_at": row["revoked_at"],
                    "open_count": int(row["open_count"] or 0),
                    "last_opened_at": row["last_opened_at"],
                    "created_by": row["created_by"],
                    "created_at": row["created_at"],
                    "status": status,
                }
            )
        return {
            "tenant_id": tenant_id,
            "items": items,
        }

    def _extend_managed_presentation_share(self, conn, share_id: str, extend_days: int):
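        """Extend a non-revoked share's expiry by the requested number of days (capped), counted from its current expiry or from now if already expired."""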
        try:
            normalized_days = int(extend_days)
        except (TypeError, ValueError):
            raise ValueError("extend_days muss Integer sein")
        normalized_days = max(1, min(MANAGED_PRESENTATION_MAX_TTL_DAYS, normalized_days))
        row = conn.execute(
            """
            SELECT share_id, expires_at, revoked_at
            FROM tenant_managed_presentation_shares
            WHERE share_id = ?
            """,
            (share_id,),
        ).fetchone()
        if not row:
            raise LookupError("share nicht gefunden")
        if row["revoked_at"]:
            raise PermissionError("widerrufene Links koennen nicht verlaengert werden")
        now_dt = datetime.now(tz=timezone.utc).replace(microsecond=0)
        try:
            base_dt = datetime.fromisoformat(str(row["expires_at"]))
            if base_dt.tzinfo is None:
                base_dt = base_dt.replace(tzinfo=timezone.utc)
        except Exception:
            base_dt = now_dt
        if base_dt < now_dt:
            base_dt = now_dt
        new_expires = (base_dt + timedelta(days=normalized_days)).isoformat()
        conn.execute(
            "UPDATE tenant_managed_presentation_shares SET expires_at = ? WHERE share_id = ?",
            (new_expires, share_id),
        )
        conn.commit()
        return {
            "share_id": share_id,
            "expires_at": new_expires,
            "extended_days": normalized_days,
        }

    def _get_managed_navigator(self, conn, tenant_id: int):
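        """Load the tenant's stored navigator state; fall back to the empty default payload when no row exists or the stored data is invalid."""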
        row = conn.execute(
            """
            SELECT
              tenant_id,
              answers_json,
              selected_bundle_ids_json,
              recommended_bundle_ids_json,
              optional_bundle_ids_json,
              excluded_bundle_ids_json,
              cart_items_json,
              updated_at
            FROM tenant_managed_navigator
            WHERE tenant_id = ?
            """,
            (tenant_id,),
        ).fetchone()
        if not row:
            return self._default_managed_navigator_payload(tenant_id)

        payload = self._default_managed_navigator_payload(tenant_id)
        payload["updated_at"] = row["updated_at"]
        for source_key, target_key in [
            ("answers_json", "answers"),
            ("selected_bundle_ids_json", "selected_bundle_ids"),
            ("recommended_bundle_ids_json", "recommended_bundle_ids"),
            ("optional_bundle_ids_json", "optional_bundle_ids"),
            ("excluded_bundle_ids_json", "excluded_bundle_ids"),
            ("cart_items_json", "cart_items"),
        ]:
            try:
                parsed = json.loads(row[source_key] or ("{}" if target_key == "answers" else "[]"))
            except Exception:
                parsed = {} if target_key == "answers" else []
            payload[target_key] = parsed
        try:
            payload["answers"] = self._normalize_managed_answers(payload["answers"])
            payload["selected_bundle_ids"] = self._normalize_string_list(payload["selected_bundle_ids"])
            payload["recommended_bundle_ids"] = self._normalize_string_list(payload["recommended_bundle_ids"])
            payload["optional_bundle_ids"] = self._normalize_string_list(payload["optional_bundle_ids"])
            payload["excluded_bundle_ids"] = self._normalize_string_list(payload["excluded_bundle_ids"])
            payload["cart_items"] = self._normalize_cart_items(payload["cart_items"])
        except ValueError:
            return self._default_managed_navigator_payload(tenant_id)
        return payload

    def _upsert_managed_navigator(self, conn, tenant_id: int, payload):
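        """Persist the navigator state for the tenant, record an 'offer' history entry, and return the stored payload."""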
        answers = self._normalize_managed_answers(payload.get("answers", {}))
        selected_bundle_ids = self._normalize_string_list(payload.get("selected_bundle_ids", []))
        recommended_bundle_ids = self._normalize_string_list(payload.get("recommended_bundle_ids", []))
        optional_bundle_ids = self._normalize_string_list(payload.get("optional_bundle_ids", []))
        excluded_bundle_ids = self._normalize_string_list(payload.get("excluded_bundle_ids", []))
        cart_items = self._normalize_cart_items(payload.get("cart_items", []))

        conn.execute(
            """
            INSERT INTO tenant_managed_navigator (
              tenant_id,
              answers_json,
              selected_bundle_ids_json,
              recommended_bundle_ids_json,
              optional_bundle_ids_json,
              excluded_bundle_ids_json,
              cart_items_json,
              updated_at
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
            ON CONFLICT(tenant_id) DO UPDATE SET
              answers_json = excluded.answers_json,
              selected_bundle_ids_json = excluded.selected_bundle_ids_json,
              recommended_bundle_ids_json = excluded.recommended_bundle_ids_json,
              optional_bundle_ids_json = excluded.optional_bundle_ids_json,
              excluded_bundle_ids_json = excluded.excluded_bundle_ids_json,
              cart_items_json = excluded.cart_items_json,
              updated_at = CURRENT_TIMESTAMP
            """,
            (
                tenant_id,
                json.dumps(answers),
                json.dumps(selected_bundle_ids),
                json.dumps(recommended_bundle_ids),
                json.dumps(optional_bundle_ids),
                json.dumps(excluded_bundle_ids),
                json.dumps(cart_items),
            ),
        )
        self._create_managed_navigator_history_entry(
            conn,
            tenant_id,
            "offer",
            self._history_snapshot_from_managed_navigator_payload(
                {
                    "answers": answers,
                    "selected_bundle_ids": selected_bundle_ids,
                    "recommended_bundle_ids": recommended_bundle_ids,
                    "optional_bundle_ids": optional_bundle_ids,
                    "excluded_bundle_ids": excluded_bundle_ids,
                    "cart_items": cart_items,
                }
            ),
            event_label="Angebot gespeichert",
            created_by=str(payload.get("created_by", "") or ""),
        )
        conn.commit()
        return self._get_managed_navigator(conn, tenant_id)

    def do_OPTIONS(self):
        self.send_response(204)
        self.end_headers()

    def do_GET(self):
        parsed = urlparse(self.path)
        if parsed.path == "/api/health":
            return self._json({"status": "ok", "time": now_iso()})

        if parsed.path == "/api/auth/oidc/config":
            return self._json(
                {
                    "enabled": _oidc_is_configured(),
                    "tenant_id": OIDC_TENANT_ID,
                    "client_id": OIDC_CLIENT_ID,
                    "redirect_uri": OIDC_REDIRECT_URI,
                    "scope": OIDC_SCOPE,
                    "authorize_endpoint": (
                        f"https://login.microsoftonline.com/{OIDC_TENANT_ID}/oauth2/v2.0/authorize"
                        if OIDC_TENANT_ID
                        else ""
                    ),
                }
            )

        if parsed.path == "/api/auth/me":
            session = self._authenticated_session()
            if not session:
                return self._json({"error": "nicht authentifiziert"}, status=401)
            return self._json(
                {
                    "id": session.get("sub", ""),
                    "email": session.get("email", ""),
                    "display_name": session.get("name", ""),
                    "roles": session.get("roles", []),
                    "expires_at": session.get("exp", 0),
                }
            )

        if parsed.path == "/api/tenants":
            include_archived = parse_qs(parsed.query).get("include_archived", ["0"])[0].lower() in {"1", "true", "yes"}
            query = "SELECT id, tenant_slug AS slug, tenant_name AS name, is_archived FROM tenants"
            if not include_archived:
                query += " WHERE is_archived = 0"
            query += " ORDER BY tenant_name"
            with self._connect() as conn:
                rows = conn.execute(query).fetchall()
            return self._json([dict(row) for row in rows])

        if parsed.path == "/api/users":
            if self._require_admin_session() is None:
                return
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                users = self._get_users(conn, tenant_id)
            return self._json(users)

        if parsed.path == "/api/app/users":
            if self._require_admin_session() is None:
                return
            with self._connect() as conn:
                users = self._get_app_users(conn)
            return self._json(users)

        if parsed.path == "/api/portal/overview":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                tenant_name = self._get_tenant_name(conn, tenant_id)
                overview = self._get_overview(conn, tenant_id)
            if not tenant_name or not overview:
                return self._json({"error": "tenant nicht gefunden"}, status=404)
            payload = {"tenant_id": tenant_id, "tenant_name": tenant_name}
            payload.update(overview)
            return self._json(payload)

        if parsed.path == "/api/managed/contracts":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                contracts = self._get_contracts(conn, tenant_id)
            return self._json(contracts)

        if parsed.path == "/api/inventory":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                inventory = self._get_inventory(conn, tenant_id)
            return self._json(inventory)

        if parsed.path == "/api/advisor":
            tenant_id = self._tenant_id_from_query(parsed)
            pillar = parse_qs(parsed.query).get("pillar", [None])[0]
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                advisor = self._get_advisor(conn, tenant_id, pillar)
            return self._json(advisor)

        if parsed.path == "/api/portal/profile":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                tenant_name = self._get_tenant_name(conn, tenant_id)
                if not tenant_name:
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                overview = self._get_overview(conn, tenant_id)
                contracts = self._get_contracts(conn, tenant_id)
                inventory = self._get_inventory(conn, tenant_id)
                advisor = self._get_advisor(conn, tenant_id)
            if not overview:
                return self._json({"error": "keine Portaldaten fuer tenant vorhanden"}, status=404)
            return self._json(
                {
                    "tenant_id": tenant_id,
                    "tenant_name": tenant_name,
                    "kpis": overview["kpis"],
                    "score": overview["score"],
                    "mdm": overview["mdm"],
                    "contracts": contracts,
                    "inventory": inventory,
                    "advisor": advisor,
                }
            )

        if parsed.path == "/api/security/measures":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)

            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                rows = conn.execute(
                    """
                    SELECT
                        sm.id AS measure_id,
                        sm.measure_key AS key,
                        sm.area,
                        sm.title,
                        sm.text,
                        COALESCE(ts.status, sm.default_state) AS state
                    FROM security_measures sm
                    LEFT JOIN tenant_security_status ts
                      ON ts.measure_id = sm.id
                     AND ts.tenant_id = ?
                    ORDER BY sm.id
                    """,
                    (tenant_id,),
                ).fetchall()
            return self._json([dict(row) for row in rows])

        if parsed.path == "/api/services/visibility":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id, include_archived=True):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                visibility = self._get_service_visibility(conn, tenant_id)
            return self._json(visibility)

        if parsed.path == "/api/services/catalog":
            with self._connect() as conn:
                custom_services = self._get_service_catalog_custom(conn)
            return self._json(custom_services)

        if parsed.path == "/api/managed-navigator":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id, include_archived=True):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                payload = self._get_managed_navigator(conn, tenant_id)
            return self._json(payload)

        if parsed.path == "/api/managed-navigator/history":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            query = parse_qs(parsed.query)
            limit = query.get("limit", [30])[0]
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id, include_archived=True):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                payload = self._get_managed_navigator_history(conn, tenant_id, limit)
            return self._json(payload)

        if parsed.path == "/api/managed-presentation/shares":
            tenant_id = self._tenant_id_from_query(parsed)
            if tenant_id is None:
                return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)
            query = parse_qs(parsed.query)
            limit = query.get("limit", [50])[0]
            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id, include_archived=True):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                payload = self._list_managed_presentation_shares(conn, tenant_id, limit)
            return self._json(payload)

        if parsed.path == "/api/managed-presentation/view":
            query = parse_qs(parsed.query)
            share_id = str(query.get("share", [""])[0] or "").strip()
            token = str(query.get("token", [""])[0] or "").strip()
            if not share_id:
                return self._json({"error": "share erforderlich"}, status=400)
            if not token:
                auth_header = self.headers.get("Authorization", "")
                if auth_header.lower().startswith("bearer "):
                    token = auth_header[7:].strip()
            if not token:
                return self._json({"error": "token erforderlich"}, status=401)
            with self._connect() as conn:
                try:
                    payload = self._view_managed_presentation_share(conn, share_id, token)
                except LookupError:
                    return self._json({"error": "share nicht gefunden"}, status=404)
                except PermissionError as exc:
                    return self._json({"error": str(exc)}, status=403)
            return self._json(payload)

        return super().do_GET()

    def do_POST(self):
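        """Handle JSON create/action endpoints (OIDC code exchange, users, tenants, catalog entries, presentation shares)."""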
        parsed = urlparse(self.path)

        try:
            payload = self._read_json()
        except json.JSONDecodeError:
            return self._json({"error": "ungueltiges JSON"}, status=400)
        except ValueError as exc:
            return self._json({"error": str(exc)}, status=413)

        if not isinstance(payload, dict):
            return self._json({"error": "payload muss Objekt sein"}, status=400)

        if parsed.path == "/api/auth/oidc/exchange":
            code = str(payload.get("code", "")).strip()
            code_verifier = str(payload.get("code_verifier", "")).strip()
            redirect_uri = str(payload.get("redirect_uri", "")).strip()
            try:
                auth_payload = exchange_entra_oidc_code(
                    code,
                    code_verifier,
                    redirect_uri,
                    self.db_dialect,
                    self.db_target,
                )
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=400)
            except PermissionError as exc:
                return self._json({"error": str(exc)}, status=403)
            except RuntimeError as exc:
                return self._json({"error": str(exc)}, status=500)
            except urlerror.HTTPError as exc:
                return self._json({"error": f"OIDC provider error ({exc.code})"}, status=502)
            except Exception:
                return self._json({"error": "OIDC login fehlgeschlagen"}, status=502)
            return self._json(auth_payload, status=200)

        if parsed.path == "/api/app/users":
            if self._require_admin_session() is None:
                return
            display_name = payload.get("display_name")
            email = payload.get("email")
            role = payload.get("role")
            auth_provider = str(payload.get("auth_provider", "oidc")).strip().lower() or "oidc"

            if not isinstance(display_name, str) or not display_name.strip():
                return self._json({"error": "display_name ist erforderlich"}, status=400)
            if not isinstance(email, str) or not email.strip():
                return self._json({"error": "email ist erforderlich"}, status=400)
            if role not in VALID_APP_USER_ROLES:
                return self._json({"error": f"role muss in {sorted(VALID_APP_USER_ROLES)} sein"}, status=400)
            if auth_provider not in VALID_APP_AUTH_PROVIDERS:
                return self._json({"error": f"auth_provider muss in {sorted(VALID_APP_AUTH_PROVIDERS)} sein"}, status=400)

            display_name = display_name.strip()
            email = email.strip().lower()
            if len(display_name) > 140:
                return self._json({"error": "display_name zu lang (max 140)"}, status=400)
            if len(email) > 254:
                return self._json({"error": "email zu lang (max 254)"}, status=400)
            if not re.fullmatch(r"[^@\s]+@[^@\s]+\.[^@\s]+", email):
                return self._json({"error": "email ungueltig"}, status=400)

            with self._connect() as conn:
                try:
                    user_id = insert_and_get_id(
                        conn,
                        """
                        INSERT INTO app_users (display_name, email, role, auth_provider, is_active)
                        VALUES (?, ?, ?, ?, 1)
                        """,
                        (display_name, email, role, auth_provider),
                    )
                except DB_INTEGRITY_ERRORS:
                    return self._json({"error": "email existiert bereits"}, status=409)
                conn.commit()
                row = conn.execute(
                    """
                    SELECT id, display_name, email, role, auth_provider, is_active, created_at
                    FROM app_users
                    WHERE id = ?
                    """,
                    (user_id,),
                ).fetchone()
            return self._json(
                {
                    "id": row["id"],
                    "display_name": row["display_name"],
                    "email": row["email"],
                    "role": row["role"],
                    "auth_provider": row["auth_provider"],
                    "is_active": bool(row["is_active"]),
                    "created_at": row["created_at"],
                },
                status=201,
            )

        if parsed.path == "/api/tenants":
            if self._require_admin_session() is None:
                return
            tenant_name = payload.get("tenant_name")
            tenant_slug_input = payload.get("tenant_slug")

            if not isinstance(tenant_name, str) or not tenant_name.strip():
                return self._json({"error": "tenant_name ist erforderlich"}, status=400)
            tenant_name = tenant_name.strip()
            if len(tenant_name) > 140:
                return self._json({"error": "tenant_name zu lang (max 140)"}, status=400)

            if tenant_slug_input is not None and not isinstance(tenant_slug_input, str):
                return self._json({"error": "tenant_slug muss String sein"}, status=400)
            slug_source = (tenant_slug_input or tenant_name).strip()
            if not slug_source:
                slug_source = tenant_name

            with self._connect() as conn:
                tenant_slug = ensure_unique_slug(conn, slug_source)
                tenant_id = insert_and_get_id(
                    conn,
                    "INSERT INTO tenants (tenant_slug, tenant_name) VALUES (?, ?)",
                    (tenant_slug, tenant_name),
                )
                seed_tenant_portal_content(conn, tenant_id, BASELINE_TENANT_SLUG)
                seed_tenant_security_status(conn, tenant_id)
                seed_tenant_users(conn, tenant_id, tenant_slug, tenant_name)
                conn.commit()
            return self._json(
                {"id": tenant_id, "slug": tenant_slug, "name": tenant_name},
                status=201,
            )

        if parsed.path == "/api/users":
            if self._require_admin_session() is None:
                return
            tenant_id = payload.get("tenant_id")
            display_name = payload.get("display_name")
            email = payload.get("email")
            role = payload.get("role")

            if not isinstance(tenant_id, int):
                return self._json({"error": "tenant_id muss Integer sein"}, status=400)
            if not isinstance(display_name, str) or not display_name.strip():
                return self._json({"error": "display_name ist erforderlich"}, status=400)
            if not isinstance(email, str) or not email.strip():
                return self._json({"error": "email ist erforderlich"}, status=400)
            if role not in VALID_TENANT_USER_ROLES:
                return self._json({"error": f"role muss in {sorted(VALID_TENANT_USER_ROLES)} sein"}, status=400)

            display_name = display_name.strip()
            email = email.strip().lower()
            if len(display_name) > 140:
                return self._json({"error": "display_name zu lang (max 140)"}, status=400)
            if len(email) > 254:
                return self._json({"error": "email zu lang (max 254)"}, status=400)
            if not re.fullmatch(r"[^@\s]+@[^@\s]+\.[^@\s]+", email):
                return self._json({"error": "email ungueltig"}, status=400)

            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                try:
                    user_id = insert_and_get_id(
                        conn,
                        """
                        INSERT INTO tenant_users (tenant_id, display_name, email, role, is_active)
                        VALUES (?, ?, ?, ?, 1)
                        """,
                        (tenant_id, display_name, email, role),
                    )
                except DB_INTEGRITY_ERRORS:
                    return self._json({"error": "email existiert fuer diesen tenant bereits"}, status=409)
                conn.commit()
                row = conn.execute(
                    """
                    SELECT id, tenant_id, display_name, email, role, is_active, created_at
                    FROM tenant_users
                    WHERE id = ?
                    """,
                    (user_id,),
                ).fetchone()
            return self._json(
                {
                    "id": row["id"],
                    "tenant_id": row["tenant_id"],
                    "display_name": row["display_name"],
                    "email": row["email"],
                    "role": row["role"],
                    "is_active": bool(row["is_active"]),
                    "created_at": row["created_at"],
                },
                status=201,
            )

        if parsed.path == "/api/services/catalog":
            if self._require_admin_session() is None:
                return
            title = payload.get("title")
            category = payload.get("category")
            portfolio = payload.get("portfolio")
            offering_model = payload.get("offeringModel", "component")
            scenario_tags_value = payload.get("scenarioTags", [])
            scope_value = payload.get("scope", [])
            description = payload.get("description")
            details = payload.get("details")
            service_id_input = payload.get("id")
            subcategory = payload.get("subcategory")
            service_level = payload.get("serviceLevel")
            price_eur_value = payload.get("priceEur")
            billing_cycle_value = payload.get("billingCycle")
            relevance_group = payload.get("relevanceGroup")
            includes_value = payload.get("includes", [])
            requires_value = payload.get("requires", [])
            replaces_value = payload.get("replaces", [])
            gap_relevant = payload.get("gapRelevant", True)
            keywords_value = payload.get("contractKeywords", [])

            if not isinstance(title, str) or not title.strip():
                return self._json({"error": "title ist erforderlich"}, status=400)
            if not isinstance(category, str) or not category.strip():
                return self._json({"error": "category ist erforderlich"}, status=400)
            if not isinstance(portfolio, str) or not portfolio.strip():
                return self._json({"error": "portfolio ist erforderlich"}, status=400)
            if not isinstance(description, str) or not description.strip():
                return self._json({"error": "description ist erforderlich"}, status=400)
            if not isinstance(details, str) or not details.strip():
                return self._json({"error": "details ist erforderlich"}, status=400)
            if offering_model not in {"component", "solution"}:
                return self._json({"error": "offeringModel muss component oder solution sein"}, status=400)
            if not isinstance(gap_relevant, bool):
                return self._json({"error": "gapRelevant muss Boolean sein"}, status=400)

            title = title.strip()
            category = category.strip()
            portfolio = portfolio.strip()
            description = description.strip()
            details = details.strip()
            subcategory = subcategory.strip() if isinstance(subcategory, str) and subcategory.strip() else None
            service_level = service_level.strip() if isinstance(service_level, str) and service_level.strip() else None
            billing_cycle = (
                str(billing_cycle_value).strip().lower()
                if isinstance(billing_cycle_value, str) and billing_cycle_value.strip()
                else None
            )
            relevance_group = relevance_group.strip() if isinstance(relevance_group, str) and relevance_group.strip() else None
            if billing_cycle is not None and billing_cycle not in {"monatlich", "jaehrlich", "einmalig"}:
                return self._json({"error": "billingCycle muss monatlich, jaehrlich oder einmalig sein"}, status=400)

            if price_eur_value in (None, ""):
                price_eur = None
            else:
                try:
                    price_eur = float(price_eur_value)
                except (TypeError, ValueError):
                    return self._json({"error": "priceEur muss numerisch sein"}, status=400)
                if price_eur < 0:
                    return self._json({"error": "priceEur darf nicht negativ sein"}, status=400)
                price_eur = round(price_eur, 2)

            if len(title) > 180:
                return self._json({"error": "title zu lang (max 180)"}, status=400)
            if len(description) > 1200 or len(details) > 2000:
                return self._json({"error": "description/details zu lang"}, status=400)

            if isinstance(keywords_value, str):
                keywords = [entry.strip() for entry in keywords_value.split(",") if entry.strip()]
            elif isinstance(keywords_value, list):
                keywords = [str(entry).strip() for entry in keywords_value if str(entry).strip()]
            else:
                return self._json({"error": "contractKeywords muss String oder Liste sein"}, status=400)

            try:
                scenario_tags = normalize_text_list(scenario_tags_value)
                scope = normalize_text_list(scope_value)
                includes = normalize_text_list(includes_value)
                requires = normalize_text_list(requires_value)
                replaces = normalize_text_list(replaces_value)
            except ValueError:
                return self._json(
                    {"error": "scenarioTags, scope, includes, requires und replaces muessen String oder Liste sein"},
                    status=400,
                )

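            # Service id: use the submitted id when present, otherwise slugify the title; allow only [a-z0-9_].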
            if isinstance(service_id_input, str) and service_id_input.strip():
                service_id = service_id_input.strip().lower()
            else:
                service_id = slugify(title).replace("-", "_")
            service_id = re.sub(r"[^a-z0-9_]+", "_", service_id).strip("_")
            if not service_id:
                return self._json({"error": "ungueltige id"}, status=400)
            if len(service_id) > 120:
                return self._json({"error": "id zu lang (max 120)"}, status=400)

            with self._connect() as conn:
                existing = conn.execute(
                    "SELECT 1 FROM service_catalog_custom WHERE service_id = ?",
                    (service_id,),
                ).fetchone()
                if existing:
                    return self._json({"error": "service_id existiert bereits"}, status=409)

                conn.execute(
                    """
                    INSERT INTO service_catalog_custom (
                      service_id, title, category, subcategory, portfolio, service_level,
                      price_eur, billing_cycle,
                      offering_model, scenario_tags_json, scope_json,
                      description, details, contract_keywords_json, relevance_group,
                      includes_json, requires_json, replaces_json, gap_relevant
                    )
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        service_id,
                        title,
                        category,
                        subcategory,
                        portfolio,
                        service_level,
                        price_eur,
                        billing_cycle,
                        offering_model,
                        json.dumps(scenario_tags),
                        json.dumps(scope),
                        description,
                        details,
                        json.dumps(keywords),
                        relevance_group,
                        json.dumps(includes),
                        json.dumps(requires),
                        json.dumps(replaces),
                        1 if gap_relevant else 0,
                    ),
                )
                conn.commit()
                row = conn.execute(
                    """
                    SELECT
                      service_id, title, category, subcategory, portfolio, service_level,
                      price_eur, billing_cycle,
                      offering_model, scenario_tags_json, scope_json,
                      description, details, contract_keywords_json, relevance_group,
                      includes_json, requires_json, replaces_json, gap_relevant, created_at
                    FROM service_catalog_custom
                    WHERE service_id = ?
                    """,
                    (service_id,),
                ).fetchone()

            created = {
                "id": row["service_id"],
                "title": row["title"],
                "category": row["category"],
                "subcategory": row["subcategory"],
                "portfolio": row["portfolio"],
                "serviceLevel": row["service_level"],
                "priceEur": row["price_eur"],
                "billingCycle": row["billing_cycle"],
                "offeringModel": row["offering_model"],
                "scenarioTags": json.loads(row["scenario_tags_json"] or "[]"),
                "scope": json.loads(row["scope_json"] or "[]"),
                "description": row["description"],
                "details": row["details"],
                "contractKeywords": json.loads(row["contract_keywords_json"] or "[]"),
                "relevanceGroup": row["relevance_group"],
                "includes": json.loads(row["includes_json"] or "[]"),
                "requires": json.loads(row["requires_json"] or "[]"),
                "replaces": json.loads(row["replaces_json"] or "[]"),
                "gapRelevant": bool(row["gap_relevant"]),
                "isCustom": True,
                "created_at": row["created_at"],
            }
            return self._json(created, status=201)

        if parsed.path == "/api/managed-presentation/shares":
            if self._require_admin_session() is None:
                return
            with self._connect() as conn:
                try:
                    created = self._create_managed_presentation_share(conn, payload)
                except LookupError:
                    return self._json({"error": "tenant nicht gefunden"}, status=404)
                except ValueError as exc:
                    return self._json({"error": str(exc)}, status=400)
            base_url = self._request_base_url()
            created["share_url"] = f"{base_url}/managed-presentation.html?share={created['share_id']}"
            return self._json(created, status=201)

        if parsed.path == "/api/managed-presentation/auth":
            share_id = str(payload.get("share_id", "")).strip()
            password = str(payload.get("password", ""))
            if not share_id:
                return self._json({"error": "share_id erforderlich"}, status=400)
            with self._connect() as conn:
                try:
                    token = self._authenticate_managed_presentation_share(conn, share_id, password)
                except LookupError:
                    return self._json({"error": "share nicht gefunden"}, status=404)
                except PermissionError as exc:
                    return self._json({"error": str(exc)}, status=403)
            return self._json(
                {
                    "share_id": share_id,
                    "access_token": token,
                    "expires_in": MANAGED_PRESENTATION_TOKEN_TTL_SECONDS,
                }
            )

        if parsed.path.startswith("/api/managed-presentation/shares/") and parsed.path.endswith("/revoke"):
            if self._require_admin_session() is None:
                return
            parts = [part for part in parsed.path.split("/") if part]
            share_id = parts[3] if len(parts) >= 5 else ""
            share_id = str(share_id).strip()
            if not share_id:
                return self._json({"error": "share_id erforderlich"}, status=400)
            with self._connect() as conn:
                try:
                    revoked = self._revoke_managed_presentation_share(conn, share_id)
                except LookupError:
                    return self._json({"error": "share nicht gefunden"}, status=404)
            return self._json(revoked)

        return self._json({"error": "unbekannter POST Endpoint"}, status=404)

    def do_PUT(self):
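        """PUT is only used to persist the managed navigator state for a tenant."""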
        parsed = urlparse(self.path)
        if parsed.path != "/api/managed-navigator":
            return self._json({"error": "unbekannter PUT Endpoint"}, status=404)

        tenant_id = self._tenant_id_from_query(parsed)
        if tenant_id is None:
            return self._json({"error": "tenant_id erforderlich (Integer)"}, status=400)

        try:
            payload = self._read_json()
        except json.JSONDecodeError:
            return self._json({"error": "ungueltiges JSON"}, status=400)
        except ValueError as exc:
            return self._json({"error": str(exc)}, status=413)

        if not isinstance(payload, dict):
            return self._json({"error": "payload muss Objekt sein"}, status=400)

        with self._connect() as conn:
            if not self._get_tenant_name(conn, tenant_id, include_archived=True):
                return self._json({"error": "tenant nicht gefunden"}, status=404)
            try:
                saved = self._upsert_managed_navigator(conn, tenant_id, payload)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=400)
        return self._json(saved)

    def do_PATCH(self):
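        """Apply partial updates: share expiry, tenant archive flag, tenant/app users, measure status, service visibility."""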
        parsed = urlparse(self.path)
        parts = [part for part in parsed.path.split("/") if part]

        if len(parts) == 4 and parts[:3] == ["api", "managed-presentation", "shares"]:
            if self._require_admin_session() is None:
                return
            share_id = str(parts[3] or "").strip()
            if not share_id:
                return self._json({"error": "share_id erforderlich"}, status=400)
            try:
                payload = self._read_json()
            except json.JSONDecodeError:
                return self._json({"error": "ungueltiges JSON"}, status=400)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=413)
            if not isinstance(payload, dict):
                return self._json({"error": "payload muss Objekt sein"}, status=400)
            extend_days = payload.get("extend_days")
            if extend_days is None:
                return self._json({"error": "extend_days erforderlich"}, status=400)
            with self._connect() as conn:
                try:
                    result = self._extend_managed_presentation_share(conn, share_id, extend_days)
                except LookupError:
                    return self._json({"error": "share nicht gefunden"}, status=404)
                except PermissionError as exc:
                    return self._json({"error": str(exc)}, status=409)
                except ValueError as exc:
                    return self._json({"error": str(exc)}, status=400)
            return self._json(result)

        if len(parts) == 3 and parts[0] == "api" and parts[1] == "tenants":
            if self._require_admin_session() is None:
                return
            try:
                tenant_id = int(parts[2])
            except ValueError:
                return self._json({"error": "ungueltige tenant_id"}, status=400)

            try:
                payload = self._read_json()
            except json.JSONDecodeError:
                return self._json({"error": "ungueltiges JSON"}, status=400)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=413)

            if "archived" not in payload or not isinstance(payload.get("archived"), bool):
                return self._json({"error": "archived (Boolean) ist erforderlich"}, status=400)

            archive_target = 1 if payload["archived"] else 0
            with self._connect() as conn:
                row = conn.execute(
                    "SELECT id, tenant_slug, tenant_name, is_archived FROM tenants WHERE id = ?",
                    (tenant_id,),
                ).fetchone()
                if not row:
                    return self._json({"error": "tenant nicht gefunden"}, status=404)

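                # Never archive the last remaining active tenant.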
                if archive_target == 1 and row["is_archived"] == 0:
                    active_count = conn.execute(
                        "SELECT COUNT(*) AS c FROM tenants WHERE is_archived = 0"
                    ).fetchone()["c"]
                    if active_count <= 1:
                        return self._json({"error": "mindestens ein aktiver tenant muss bestehen bleiben"}, status=409)

                conn.execute(
                    "UPDATE tenants SET is_archived = ? WHERE id = ?",
                    (archive_target, tenant_id),
                )
                conn.commit()
                updated = conn.execute(
                    "SELECT id, tenant_slug AS slug, tenant_name AS name, is_archived FROM tenants WHERE id = ?",
                    (tenant_id,),
                ).fetchone()
            return self._json(dict(updated))

        if len(parts) == 3 and parts[0] == "api" and parts[1] == "users":
            if self._require_admin_session() is None:
                return
            try:
                user_id = int(parts[2])
            except ValueError:
                return self._json({"error": "ungueltige user_id"}, status=400)

            try:
                payload = self._read_json()
            except json.JSONDecodeError:
                return self._json({"error": "ungueltiges JSON"}, status=400)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=413)

            allowed_fields = {"display_name", "email", "role", "is_active"}
            provided = {key for key in payload.keys() if key in allowed_fields}
            if not provided:
                return self._json({"error": "mindestens ein Feld aus display_name,email,role,is_active erforderlich"}, status=400)

            with self._connect() as conn:
                current = conn.execute(
                    "SELECT id, tenant_id, display_name, email, role, is_active FROM tenant_users WHERE id = ?",
                    (user_id,),
                ).fetchone()
                if not current:
                    return self._json({"error": "user nicht gefunden"}, status=404)

                display_name = current["display_name"]
                email = current["email"]
                role = current["role"]
                is_active = current["is_active"]

                if "display_name" in payload:
                    if not isinstance(payload["display_name"], str) or not payload["display_name"].strip():
                        return self._json({"error": "display_name muss nicht-leerer String sein"}, status=400)
                    display_name = payload["display_name"].strip()
                    if len(display_name) > 140:
                        return self._json({"error": "display_name zu lang (max 140)"}, status=400)

                if "email" in payload:
                    if not isinstance(payload["email"], str) or not payload["email"].strip():
                        return self._json({"error": "email muss nicht-leerer String sein"}, status=400)
                    email = payload["email"].strip().lower()
                    if len(email) > 254:
                        return self._json({"error": "email zu lang (max 254)"}, status=400)
                    if not re.fullmatch(r"[^@\s]+@[^@\s]+\.[^@\s]+", email):
                        return self._json({"error": "email ungueltig"}, status=400)

                if "role" in payload:
                    if payload["role"] not in VALID_TENANT_USER_ROLES:
                        return self._json({"error": f"role muss in {sorted(VALID_TENANT_USER_ROLES)} sein"}, status=400)
                    role = payload["role"]

                if "is_active" in payload:
                    if not isinstance(payload["is_active"], bool):
                        return self._json({"error": "is_active muss Boolean sein"}, status=400)
                    is_active = 1 if payload["is_active"] else 0

                try:
                    conn.execute(
                        """
                        UPDATE tenant_users
                        SET display_name = ?, email = ?, role = ?, is_active = ?
                        WHERE id = ?
                        """,
                        (display_name, email, role, is_active, user_id),
                    )
                except DB_INTEGRITY_ERRORS:
                    return self._json({"error": "email existiert fuer diesen tenant bereits"}, status=409)

                conn.commit()
                updated = conn.execute(
                    """
                    SELECT id, tenant_id, display_name, email, role, is_active, created_at
                    FROM tenant_users
                    WHERE id = ?
                    """,
                    (user_id,),
                ).fetchone()
            return self._json(
                {
                    "id": updated["id"],
                    "tenant_id": updated["tenant_id"],
                    "display_name": updated["display_name"],
                    "email": updated["email"],
                    "role": updated["role"],
                    "is_active": bool(updated["is_active"]),
                    "created_at": updated["created_at"],
                }
            )

        if len(parts) == 4 and parts[:3] == ["api", "app", "users"]:
            if self._require_admin_session() is None:
                return
            try:
                user_id = int(parts[3])
            except ValueError:
                return self._json({"error": "ungueltige user_id"}, status=400)

            try:
                payload = self._read_json()
            except json.JSONDecodeError:
                return self._json({"error": "ungueltiges JSON"}, status=400)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=413)

            allowed_fields = {"display_name", "email", "role", "auth_provider", "is_active"}
            provided = {key for key in payload.keys() if key in allowed_fields}
            if not provided:
                return self._json({"error": "mindestens ein Feld aus display_name,email,role,auth_provider,is_active erforderlich"}, status=400)

            with self._connect() as conn:
                current = conn.execute(
                    "SELECT id, display_name, email, role, auth_provider, is_active FROM app_users WHERE id = ?",
                    (user_id,),
                ).fetchone()
                if not current:
                    return self._json({"error": "user nicht gefunden"}, status=404)

                display_name = current["display_name"]
                email = current["email"]
                role = current["role"]
                auth_provider = current["auth_provider"]
                is_active = current["is_active"]

                if "display_name" in payload:
                    if not isinstance(payload["display_name"], str) or not payload["display_name"].strip():
                        return self._json({"error": "display_name muss nicht-leerer String sein"}, status=400)
                    display_name = payload["display_name"].strip()
                    if len(display_name) > 140:
                        return self._json({"error": "display_name zu lang (max 140)"}, status=400)

                if "email" in payload:
                    if not isinstance(payload["email"], str) or not payload["email"].strip():
                        return self._json({"error": "email muss nicht-leerer String sein"}, status=400)
                    email = payload["email"].strip().lower()
                    if len(email) > 254:
                        return self._json({"error": "email zu lang (max 254)"}, status=400)
                    if not re.fullmatch(r"[^@\s]+@[^@\s]+\.[^@\s]+", email):
                        return self._json({"error": "email ungueltig"}, status=400)

                if "role" in payload:
                    if payload["role"] not in VALID_APP_USER_ROLES:
                        return self._json({"error": f"role muss in {sorted(VALID_APP_USER_ROLES)} sein"}, status=400)
                    role = payload["role"]

                if "auth_provider" in payload:
                    normalized_provider = str(payload["auth_provider"]).strip().lower()
                    if normalized_provider not in VALID_APP_AUTH_PROVIDERS:
                        return self._json({"error": f"auth_provider muss in {sorted(VALID_APP_AUTH_PROVIDERS)} sein"}, status=400)
                    auth_provider = normalized_provider

                if "is_active" in payload:
                    if not isinstance(payload["is_active"], bool):
                        return self._json({"error": "is_active muss Boolean sein"}, status=400)
                    is_active = 1 if payload["is_active"] else 0

                try:
                    conn.execute(
                        """
                        UPDATE app_users
                        SET display_name = ?, email = ?, role = ?, auth_provider = ?, is_active = ?
                        WHERE id = ?
                        """,
                        (display_name, email, role, auth_provider, is_active, user_id),
                    )
                except DB_INTEGRITY_ERRORS:
                    return self._json({"error": "email existiert bereits"}, status=409)

                conn.commit()
                updated = conn.execute(
                    """
                    SELECT id, display_name, email, role, auth_provider, is_active, created_at
                    FROM app_users
                    WHERE id = ?
                    """,
                    (user_id,),
                ).fetchone()
            return self._json(
                {
                    "id": updated["id"],
                    "display_name": updated["display_name"],
                    "email": updated["email"],
                    "role": updated["role"],
                    "auth_provider": updated["auth_provider"],
                    "is_active": bool(updated["is_active"]),
                    "created_at": updated["created_at"],
                }
            )

        if len(parts) == 4 and parts[:3] == ["api", "security", "measures"]:
            try:
                measure_id = int(parts[3])
            except ValueError:
                return self._json({"error": "ungueltige measure_id"}, status=400)

            try:
                payload = self._read_json()
            except json.JSONDecodeError:
                return self._json({"error": "ungueltiges JSON"}, status=400)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=413)

            tenant_id = payload.get("tenant_id")
            status_value = payload.get("status")

            if not isinstance(tenant_id, int):
                return self._json({"error": "tenant_id muss Integer sein"}, status=400)
            if status_value not in VALID_STATUS:
                return self._json({"error": f"status muss in {sorted(VALID_STATUS)} sein"}, status=400)

            with self._connect() as conn:
                tenant_exists = conn.execute("SELECT 1 FROM tenants WHERE id = ?", (tenant_id,)).fetchone()
                measure_exists = conn.execute("SELECT 1 FROM security_measures WHERE id = ?", (measure_id,)).fetchone()
                if not tenant_exists or not measure_exists:
                    return self._json({"error": "tenant oder measure nicht gefunden"}, status=404)

                conn.execute(
                    """
                    INSERT INTO tenant_security_status (tenant_id, measure_id, status, updated_at)
                    VALUES (?, ?, ?, CURRENT_TIMESTAMP)
                    ON CONFLICT(tenant_id, measure_id)
                    DO UPDATE SET status = excluded.status, updated_at = CURRENT_TIMESTAMP
                    """,
                    (tenant_id, measure_id, status_value),
                )
                conn.commit()

                row = conn.execute(
                    """
                    SELECT
                      ts.tenant_id,
                      ts.measure_id,
                      sm.measure_key AS key,
                      ts.status,
                      ts.updated_at
                    FROM tenant_security_status ts
                    JOIN security_measures sm ON sm.id = ts.measure_id
                    WHERE ts.tenant_id = ? AND ts.measure_id = ?
                    """,
                    (tenant_id, measure_id),
                ).fetchone()
            return self._json(dict(row))

        if parts == ["api", "services", "visibility"]:
            try:
                payload = self._read_json()
            except json.JSONDecodeError:
                return self._json({"error": "ungueltiges JSON"}, status=400)
            except ValueError as exc:
                return self._json({"error": str(exc)}, status=413)

            tenant_id = payload.get("tenant_id")
            service_id = payload.get("service_id")
            hidden = payload.get("hidden")

            if not isinstance(tenant_id, int):
                return self._json({"error": "tenant_id muss Integer sein"}, status=400)
            if not isinstance(service_id, str) or not service_id.strip():
                return self._json({"error": "service_id muss nicht-leerer String sein"}, status=400)
            if not isinstance(hidden, bool):
                return self._json({"error": "hidden muss Boolean sein"}, status=400)

            service_id = service_id.strip()
            if len(service_id) > 120:
                return self._json({"error": "service_id zu lang (max 120)"}, status=400)

            with self._connect() as conn:
                if not self._get_tenant_name(conn, tenant_id, include_archived=True):
                    return self._json({"error": "tenant nicht gefunden"}, status=404)

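                # Hiding stores an override row; unhiding deletes it so the service is visible again by default.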
                if hidden:
                    conn.execute(
                        """
                        INSERT INTO tenant_service_visibility (tenant_id, service_id, is_hidden, updated_at)
                        VALUES (?, ?, 1, CURRENT_TIMESTAMP)
                        ON CONFLICT(tenant_id, service_id)
                        DO UPDATE SET is_hidden = 1, updated_at = CURRENT_TIMESTAMP
                        """,
                        (tenant_id, service_id),
                    )
                else:
                    conn.execute(
                        "DELETE FROM tenant_service_visibility WHERE tenant_id = ? AND service_id = ?",
                        (tenant_id, service_id),
                    )
                conn.commit()

                visibility = self._get_service_visibility(conn, tenant_id)
            return self._json(
                {
                    "tenant_id": tenant_id,
                    "service_id": service_id,
                    "hidden": hidden,
                    "hidden_service_ids": visibility["hidden_service_ids"],
                }
            )

        return self._json({"error": "unbekannter PATCH Endpoint"}, status=404)


def main() -> None:
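    """Parse CLI arguments, initialize the PostgreSQL-backed database, and serve the API plus static files.

    Example invocation (hypothetical script name and connection string):
        DATABASE_URL=postgresql://portal:secret@localhost:5432/portal python3 kundenportal_api.py --port 8000
    """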
    parser = argparse.ArgumentParser(description="Kundenportal API (PostgreSQL)")
    parser.add_argument("--host", default=os.getenv("APP_HOST", "127.0.0.1"))
    parser.add_argument("--port", type=int, default=int(os.getenv("PORT", "8000")))
    parser.add_argument("--database-url", default=os.getenv("DATABASE_URL", ""))
    parser.add_argument("--directory", default=".")
    args = parser.parse_args()

    serve_dir = Path(args.directory).resolve()
    csv_path = (serve_dir / "security.csv").resolve()

    database_url = str(args.database_url or "").strip()
    if not database_url:
        raise RuntimeError("DATABASE_URL ist erforderlich. Bitte --database-url oder Umgebungsvariable DATABASE_URL setzen.")
    db_dialect = "postgres"
    db_target = database_url
    db_label = "PostgreSQL (DATABASE_URL)"

    init_db(db_dialect, db_target, csv_path)

    PortalApiHandler.db_dialect = db_dialect
    PortalApiHandler.db_target = db_target
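    # Handler factory: bind the serving directory so ThreadingHTTPServer can construct a handler per request.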
    handler = lambda *h_args, **h_kwargs: PortalApiHandler(*h_args, directory=str(serve_dir), **h_kwargs)
    server = ThreadingHTTPServer((args.host, args.port), handler)

    print(f"[kundenportal-api] running on http://{args.host}:{args.port}")
    print(f"[kundenportal-api] database: {db_label}")
    print(f"[kundenportal-api] serving files from: {serve_dir}")
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        pass
    finally:
        server.server_close()


if __name__ == "__main__":
    main()
