[
  {
    "path": ".dockerignore",
    "content": ".git\n.github\n.claude\n.DS_Store\nch-ui\nch-ui-server.pid\ndata\ndist\ntmp\nnode_modules\nui/node_modules\nui/.svelte-kit\nui/dist\nui/.DS_Store\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug.yml",
    "content": "name: Bug Report\ndescription: Found something that doesn't work as expected?\nbody:\n  - type: dropdown\n    id: os\n    attributes:\n      label: Operating System\n      description: What OS are you running CH-UI on?\n      options:\n        - Linux\n        - macOS\n        - Windows\n        - Other\n    validations:\n      required: true\n  - type: dropdown\n    id: arch\n    attributes:\n      label: Architecture\n      description: What architecture?\n      options:\n        - x86_64 (amd64)\n        - ARM64 (aarch64 / Apple Silicon)\n        - Other\n    validations:\n      required: true\n  - type: textarea\n    id: repro\n    attributes:\n      label: How did you encounter the bug?\n      description: How can this bug be reproduced? Please provide steps to reproduce.\n      placeholder: |-\n        1. Start CH-UI with...\n        2. Go to...\n        3. Click on...\n    validations:\n      required: true\n  - type: textarea\n    id: expected\n    attributes:\n      label: What did you expect?\n      description: What was supposed to happen?\n    validations:\n      required: true\n  - type: textarea\n    id: actual\n    attributes:\n      label: Actual Result\n      description: What actually happened?\n    validations:\n      required: true\n  - type: textarea\n    id: version\n    attributes:\n      label: Version\n      description: What version of CH-UI are you using?\n      placeholder: e.g. 2.0.0\n    validations:\n      required: true\n  - type: textarea\n    id: logs\n    attributes:\n      label: Logs / Error Output\n      description: Any relevant logs or error messages from the terminal?\n      render: shell\n    validations:\n      required: false\n  - type: markdown\n    attributes:\n      value: |-\n        ### All done, now, just submit the issue and I will do my best to take care of it!\n    validations:\n      required: false\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature.yml",
    "content": "name: Feature Request\ndescription: Tell us about something ch-UI doesn't do yet, but should!\nbody:\n  - type: textarea\n    id: idea\n    attributes:\n      label: Idea Statement\n      description: Which is the feature you would like to see implemented?\n      placeholder: |-\n        I want to be able to do anything I want, whenever I want. Because my ideas are the best.\n    validations:\n      required: true\n  - type: textarea\n    id: expected\n    attributes:\n      label: Feature implementation brainstorm\n      description: All your ideas are welcome, let's brainstorm together.\n      placeholder: |-\n        Create the next big feature that will all our problems.\n    validations:\n      required: false\n  - type: markdown\n    attributes:\n      value: |-\n        ## Thanks 🙏\n    validations:\n      required: false\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Release\n\non:\n  push:\n    tags:\n      - 'v*'\n\nenv:\n  REGISTRY: ghcr.io\n  IMAGE_NAME: ${{ github.repository }}\n\npermissions:\n  contents: write\n  packages: write\n\nconcurrency:\n  group: release-${{ github.ref }}\n  cancel-in-progress: false\n\njobs:\n  release:\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v4\n\n      - name: Setup Go\n        uses: actions/setup-go@v5\n        with:\n          go-version: '1.24'\n\n      - name: Setup Bun\n        uses: oven-sh/setup-bun@v2\n\n      - name: Build frontend\n        run: make build-frontend\n\n      - name: Extract version\n        id: version\n        run: echo \"version=${GITHUB_REF_NAME}\" >> \"$GITHUB_OUTPUT\"\n\n      - name: Cross-compile binaries\n        env:\n          VERSION: ${{ steps.version.outputs.version }}\n          COMMIT: ${{ github.sha }}\n          DATE: ${{ github.event.head_commit.timestamp }}\n        run: |\n          LDFLAGS=\"-s -w -X main.Version=${VERSION} -X main.Commit=${COMMIT} -X main.BuildDate=${DATE}\"\n\n          CGO_ENABLED=0 GOOS=linux  GOARCH=amd64 go build -ldflags \"${LDFLAGS}\" -o dist/ch-ui-linux-amd64 .\n          CGO_ENABLED=0 GOOS=linux  GOARCH=arm64 go build -ldflags \"${LDFLAGS}\" -o dist/ch-ui-linux-arm64 .\n          CGO_ENABLED=0 GOOS=darwin GOARCH=amd64 go build -ldflags \"${LDFLAGS}\" -o dist/ch-ui-darwin-amd64 .\n          CGO_ENABLED=0 GOOS=darwin GOARCH=arm64 go build -ldflags \"${LDFLAGS}\" -o dist/ch-ui-darwin-arm64 .\n\n      - name: Alpine smoke test (linux-amd64)\n        run: |\n          chmod +x dist/ch-ui-linux-amd64\n          docker run --rm -v \"$PWD/dist:/dist:ro\" alpine:3.20 /dist/ch-ui-linux-amd64 version\n\n      - name: Generate checksums\n        working-directory: dist\n        run: |\n          sha256sum ch-ui-* > checksums.txt\n          cat checksums.txt\n\n      - name: Create or update GitHub Release\n        env:\n          GH_TOKEN: ${{ 
secrets.GITHUB_TOKEN }}\n          VERSION: ${{ steps.version.outputs.version }}\n        run: |\n          set -euo pipefail\n\n          TAG=\"${VERSION}\"\n          TITLE=\"${TAG}\"\n          REPO=\"${{ github.repository }}\"\n          NOTES_FILE=\"$(mktemp)\"\n\n          cat > \"${NOTES_FILE}\" <<EOF\n          ## Install\n\n          Download the binary for your platform and run it:\n\n          \\`\\`\\`bash\n          # Linux (amd64)\n          curl -L -o ch-ui https://github.com/${REPO}/releases/download/${TAG}/ch-ui-linux-amd64\n          chmod +x ch-ui\n          sudo install -m 755 ch-ui /usr/local/bin/ch-ui\n          ch-ui\n          \\`\\`\\`\n\n          If you don't want a global install, run it as \\`./ch-ui\\`.\n\n          ## Verify checksum\n\n          \\`\\`\\`bash\n          sha256sum -c checksums.txt\n          \\`\\`\\`\n          EOF\n\n          if gh release view \"${TAG}\" >/dev/null 2>&1; then\n            gh release edit \"${TAG}\" --title \"${TITLE}\" --notes-file \"${NOTES_FILE}\"\n          else\n            gh release create \"${TAG}\" --title \"${TITLE}\" --notes-file \"${NOTES_FILE}\"\n          fi\n\n          gh release upload \"${TAG}\" \\\n            dist/ch-ui-linux-amd64 \\\n            dist/ch-ui-linux-arm64 \\\n            dist/ch-ui-darwin-amd64 \\\n            dist/ch-ui-darwin-arm64 \\\n            dist/checksums.txt \\\n            --clobber\n\n      - name: Set up Docker Buildx\n        uses: docker/setup-buildx-action@v3\n\n      - name: Verify GHCR credentials\n        env:\n          GHCR_PAT: ${{ secrets.GHCR_PAT }}\n        run: |\n          if [ -z \"${GHCR_PAT}\" ]; then\n            echo \"GHCR_PAT secret is required to publish ghcr.io/${{ github.repository }} images.\"\n            exit 1\n          fi\n\n      - name: Log in to GHCR\n        uses: docker/login-action@v3\n        with:\n          registry: ${{ env.REGISTRY }}\n          username: ${{ github.actor }}\n          password: ${{ 
secrets.GHCR_PAT }}\n\n      - name: Build and push Docker image\n        uses: docker/build-push-action@v6\n        with:\n          context: .\n          file: ./Dockerfile\n          push: true\n          platforms: linux/amd64,linux/arm64\n          tags: |\n            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.version.outputs.version }}\n            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest\n          build-args: |\n            VERSION=${{ steps.version.outputs.version }}\n            COMMIT=${{ github.sha }}\n            BUILD_DATE=${{ github.event.head_commit.timestamp }}\n"
  },
  {
    "path": ".gitignore",
    "content": "# Logs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\npnpm-debug.log*\nlerna-debug.log*\n\n# Dependencies\nnode_modules\n\n# Build output\ndist/\ndist-ssr\n*.local\n.claude\n\n# Frontend build output (embedded into Go binary)\nui/dist/\n!ui/dist/.gitkeep\n\n# Editor directories and files\n.vscode/*\n!.vscode/extensions.json\n.idea\n.DS_Store\n*.suo\n*.ntvs*\n*.njsproj\n*.sln\n*.sw?\n\n# Environment\n.env\n.env.local\n!.env.example\n\n# Docker local data\n.clickhouse_local_data\n\n# SQLite database\n/data\n*.db\n*.db-shm\n*.db-wal\n\n\n# Go binary (built by Makefile)\nch-ui\n\n.claude\n\n\n.gocache/\n\n# License tool secrets\nlicense/private.pem\nlicense/*.json\nlicense/*.log\nlicense/go.*\nlicense/licensetool\nlicense/README.md\nlicense/main.go\n\nposts.md\nch-ui-server.pid\nCLAUDE.md"
  },
  {
    "path": ".gitpod.yml",
    "content": "image: gitpod/workspace-full\n\ntasks:\n  - name: ClickHouse\n    init: docker pull clickhouse/clickhouse-server:latest\n    command: |\n      docker run -d --rm \\\n        --name clickhouse \\\n        -p 8123:8123 \\\n        -p 9000:9000 \\\n        clickhouse/clickhouse-server:latest\n      echo \"ClickHouse running on port 8123\"\n\n  - name: CH-UI\n    init: |\n      curl -L -o ch-ui https://github.com/caioricciuti/ch-ui/releases/latest/download/ch-ui-linux-amd64\n      chmod +x ch-ui\n    command: |\n      # Wait for ClickHouse to be ready\n      echo \"Waiting for ClickHouse...\"\n      while ! curl -s http://localhost:8123/ping > /dev/null 2>&1; do sleep 1; done\n      echo \"ClickHouse is up. Starting CH-UI...\"\n      CLICKHOUSE_URL=http://localhost:8123 ./ch-ui\n\nports:\n  - port: 3488\n    onOpen: open-browser\n    visibility: public\n  - port: 8123\n    onOpen: ignore\n  - port: 9000\n    onOpen: ignore"
  },
  {
    "path": "Dockerfile",
    "content": "# syntax=docker/dockerfile:1.7\n\nFROM oven/bun:1.2.23 AS ui-builder\nWORKDIR /src/ui\n\nCOPY ui/package.json ui/bun.lock ./\nRUN bun install --frozen-lockfile\n\nCOPY ui/ ./\nENV CHUI_VITE_MINIFY=true \\\n    CHUI_VITE_REPORT_COMPRESSED=false\nRUN bun run build\n\nFROM golang:1.25-alpine AS go-builder\nWORKDIR /src\n\nARG VERSION=dev\nARG COMMIT=none\nARG BUILD_DATE=unknown\nARG TARGETOS=linux\nARG TARGETARCH=amd64\n\nCOPY go.mod go.sum ./\nRUN go mod download\n\nCOPY . .\nCOPY --from=ui-builder /src/ui/dist ./ui/dist\n\nRUN CGO_ENABLED=0 GOOS=${TARGETOS} GOARCH=${TARGETARCH} \\\n    go build -trimpath -ldflags \"-s -w -X main.Version=${VERSION} -X main.Commit=${COMMIT} -X main.BuildDate=${BUILD_DATE}\" -o /out/ch-ui .\n\nFROM alpine:3.20 AS runtime\nRUN addgroup -S chui && adduser -S -G chui chui \\\n    && apk add --no-cache ca-certificates tzdata \\\n    && mkdir -p /app/data \\\n    && chown -R chui:chui /app\n\nWORKDIR /app\nCOPY --from=go-builder /out/ch-ui /usr/local/bin/ch-ui\n\nENV DATABASE_PATH=/app/data/ch-ui.db\n\nEXPOSE 3488\nVOLUME [\"/app/data\"]\n\nUSER chui\nENTRYPOINT [\"ch-ui\", \"server\"]\n"
  },
  {
    "path": "LICENSE.md",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to the Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by the Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding any notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. 
However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   Copyright 2024-2026 Caio Ricciuti\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "Makefile",
    "content": "# CH-UI Makefile\n# Single binary: server + agent + embedded frontend\n\nVERSION ?= $(shell cat VERSION 2>/dev/null || git describe --tags --always --dirty 2>/dev/null || echo \"dev\")\nCOMMIT  ?= $(shell git rev-parse --short HEAD 2>/dev/null || echo \"none\")\nDATE    ?= $(shell date -u '+%Y-%m-%dT%H:%M:%SZ')\nLDFLAGS = -s -w \\\n\t-X main.Version=$(VERSION) \\\n\t-X main.Commit=$(COMMIT) \\\n\t-X main.BuildDate=$(DATE)\n\nBINARY = ch-ui\n\n.PHONY: app build rebuild from-scratch build-frontend build-go dev test clean tidy vet help\n\n## app: Build frontend + Go binary (production-ready)\napp: build-frontend build-go\n\n## build: Build everything (frontend + Go binary)\nbuild: app\n\n## rebuild: Clean artifacts, then build everything\nrebuild:\n\t$(MAKE) clean\n\t$(MAKE) build\n\n## from-scratch: Alias for rebuild\nfrom-scratch: rebuild\n\n## build-frontend: Build the React frontend\nbuild-frontend:\n\tcd ui && bun install\n\t@cd ui && (CHUI_VITE_MINIFY=true CHUI_VITE_REPORT_COMPRESSED=false bun run build || \\\n\t\t(echo \"Frontend build was killed; retrying with low-memory profile (no minify)...\" && \\\n\t\tCHUI_VITE_MINIFY=false CHUI_VITE_REPORT_COMPRESSED=false bun run build))\n\n## build-go: Build just the Go binary (skip frontend rebuild)\nbuild-go:\n\tCGO_ENABLED=0 go build -ldflags \"$(LDFLAGS)\" -o $(BINARY) .\n\n## dev: Start the server in dev mode (expects Vite running on :5173)\ndev:\n\tgo run -ldflags \"$(LDFLAGS)\" . server --dev\n\n## test: Run all Go tests\ntest:\n\tgo test ./... -v -count=1\n\n## clean: Remove build artifacts\nclean:\n\trm -f $(BINARY)\n\trm -rf ui/dist/\n\n## tidy: Clean up Go modules\ntidy:\n\tgo mod tidy\n\n## vet: Run go vet\nvet:\n\tgo vet ./...\n\n## help: Show this help message\nhelp:\n\t@echo \"Available targets:\"\n\t@grep -E '^## ' Makefile | sed 's/## /  /'\n"
  },
  {
    "path": "README.md",
    "content": "<p align=\"center\">\n  <img src=\"ui/src/assets/logo.png\" alt=\"CH-UI Logo\" width=\"88\" />\n</p>\n\n<h1 align=\"center\">CH-UI</h1>\n\n<p align=\"center\">\n  <strong>The open-source ClickHouse management platform.</strong><br/>\n  SQL editor, dashboards, AI copilot, data pipelines, models, and admin — all in one binary. Free.\n</p>\n\n<p align=\"center\">\n  <a href=\"https://github.com/caioricciuti/ch-ui/releases\"><img src=\"https://img.shields.io/github/v/release/caioricciuti/ch-ui?label=version\" alt=\"Version\" /></a>\n  <a href=\"https://github.com/caioricciuti/ch-ui/blob/main/LICENSE\"><img src=\"https://img.shields.io/badge/license-Apache%202.0-blue\" alt=\"License\" /></a>\n  <a href=\"https://github.com/caioricciuti/ch-ui/stargazers\"><img src=\"https://img.shields.io/github/stars/caioricciuti/ch-ui\" alt=\"Stars\" /></a>\n  <a href=\"https://github.com/caioricciuti/ch-ui/pkgs/container/ch-ui\"><img src=\"https://img.shields.io/badge/docker-ghcr.io-blue\" alt=\"Docker\" /></a>\n</p>\n\n---\n\n## Why CH-UI?\n\nMost ClickHouse tools give you a query box and call it a day. CH-UI gives you a full workspace — and almost everything is **free and open source**.\n\nDownload one binary. Run it. Get:\n\n- A multi-tab **SQL editor** with formatting, profiling, and streaming results\n- **Dashboards** with a drag-and-drop panel builder and multiple chart types\n- **Brain** — an AI assistant that understands your schema (OpenAI, Ollama, or any compatible provider)\n- **Data pipelines** — visual builder for Webhook, S3, Kafka, and DB sources into ClickHouse\n- **Models** — dbt-style SQL transformations with dependency graphs and scheduling\n- **Admin panel** — user management, connection management, provider configuration\n- **Saved queries**, **schema explorer**, **connection management**, and more\n\nNo Docker requirement. No external dependencies. 
No signup.\n\n---\n\n## Table of Contents\n\n- [Features (Free)](#features-free)\n- [Community vs Pro](#community-vs-pro)\n- [Quick Start](#quick-start)\n- [Quick Start (Docker)](#quick-start-docker)\n- [Architecture](#architecture)\n- [Remote ClickHouse (Tunnel)](#remote-clickhouse-tunnel)\n- [CLI Reference](#cli-reference)\n- [Configuration](#configuration)\n- [Production Checklist](#production-checklist)\n- [Troubleshooting](#troubleshooting)\n- [Development](#development)\n- [Upgrade](#upgrade)\n- [Legal](#legal)\n- [Contributing](#contributing)\n\n---\n\n## Features (Free)\n\nEverything below is included in the free Community edition under Apache 2.0.\n\n### SQL Editor\n\n- Multi-tab interface with persistent state\n- CodeMirror 6 with SQL syntax highlighting and autocomplete\n- Query formatting and beautification\n- Streaming results via SSE — no timeout on long queries\n- **Query cost estimation** — see estimated rows and parts to scan before running (like BigQuery's dry run)\n- Query profiling (pulls from `system.query_log`) with estimate vs actual accuracy comparison\n- Query plan analysis (EXPLAIN with parsed tree view)\n- Configurable max result rows and query timeout\n- Guardrails enforcement (query validation before execution)\n\n### Schema Explorer\n\n- Full database/table/column tree browser\n- Table data preview with pagination\n- Column type introspection\n- Search across databases and tables\n\n### Dashboards\n\n- Create unlimited dashboards\n- Drag-and-drop panel builder\n- Multiple chart types (line, bar, scatter, area, and more via uplot)\n- Time range selector with presets (1h, 24h, 7d, 30d, custom)\n- Timezone support\n- Auto-refresh control\n- Each panel runs its own SQL query against your ClickHouse\n\n### Brain (AI Assistant)\n\n- Chat with your data using natural language\n- Multi-chat support with full history persistence\n- **Provider support:** OpenAI, OpenAI-compatible APIs (Groq, Together, etc.), Ollama (local LLMs)\n- 
Admin-controlled model and provider activation\n- Schema-aware context (attach up to 10 tables as context per chat)\n- SQL artifact generation — run generated queries directly from chat\n- Brain skills (configurable system prompts/instructions)\n- Token usage tracking\n- Langfuse integration for LLM observability\n\n### Data Pipelines\n\n- Visual pipeline canvas (drag-and-drop with XyFlow)\n- **Source connectors:** Webhook (inbound HTTP), Database (SQL query), S3, Kafka (with SCRAM auth)\n- **Sink:** ClickHouse (native insert with configurable batch size)\n- Pipeline start/stop controls\n- Run history, metrics, and error tracking\n- Real-time monitoring (rows ingested, bytes, batches, errors)\n\n### Models (SQL Transformations)\n\n- dbt-style SQL models with `table`, `view`, and `incremental` materialization\n- Model dependency graph (DAG visualization)\n- Execution with dependency ordering\n- Run history and results tracking\n- Table engine configuration per model\n- Can be scheduled via the scheduler (Pro) or run manually\n\n### Saved Queries\n\n- Save queries with titles and descriptions\n- Sort by date, name, or query length\n- Filter, search, copy, and organize\n- Quick access from the sidebar\n\n### Admin Panel\n\n- User management (create, delete, assign roles)\n- ClickHouse user management (create users, update passwords, delete)\n- Connection management with multi-connection support\n- Brain provider and model configuration\n- Brain skill management\n- Langfuse integration settings\n- System statistics dashboard\n\n### Connections & Tunnel\n\n- Multi-connection support (manage multiple ClickHouse instances)\n- Secure WebSocket tunnel for remote ClickHouse access\n- Token-based agent authentication\n- Connection health monitoring\n- Install connector as OS service (`ch-ui service install`)\n\n### Other\n\n- Dark mode\n- Session-based authentication with rate limiting\n- Security headers (CSP, X-Frame-Options, etc.)\n- Health check endpoint (`/health`)\n- 
Self-update (`ch-ui update`)\n- Shell completion generation\n\n---\n\n## Community vs Pro\n\nAlmost everything is free. Pro adds enterprise governance and scheduling.\n\n| Capability | Community (Free) | Pro |\n|---|:---:|:---:|\n| SQL editor + explorer + formatting + profiling | **Yes** | Yes |\n| Saved queries | **Yes** | Yes |\n| Dashboards + panel builder | **Yes** | Yes |\n| Brain (AI assistant, multi-provider) | **Yes** | Yes |\n| Data pipelines (Webhook, S3, Kafka, DB) | **Yes** | Yes |\n| Models (SQL transformations, DAG) | **Yes** | Yes |\n| Admin panel + user management | **Yes** | Yes |\n| Multi-connection management | **Yes** | Yes |\n| Tunnel (remote ClickHouse) | **Yes** | Yes |\n| Scheduled query jobs + cron + history | - | **Yes** |\n| Governance (metadata, visual lineage graph, column-level lineage, access matrix) | - | **Yes** |\n| Policies + incidents + violations | - | **Yes** |\n| Alerting (SMTP, Resend, Brevo) | - | **Yes** |\n\nSee: [`docs/license.md`](docs/license.md)\n\n---\n\n## Quick Start\n\n### 1) Download\n\nLinux (amd64):\n```bash\ncurl -L -o ch-ui https://github.com/caioricciuti/ch-ui/releases/latest/download/ch-ui-linux-amd64\nchmod +x ch-ui\n```\n\nLinux (arm64):\n```bash\ncurl -L -o ch-ui https://github.com/caioricciuti/ch-ui/releases/latest/download/ch-ui-linux-arm64\nchmod +x ch-ui\n```\n\nmacOS (Apple Silicon):\n```bash\ncurl -L -o ch-ui https://github.com/caioricciuti/ch-ui/releases/latest/download/ch-ui-darwin-arm64\nchmod +x ch-ui\n```\n\nmacOS (Intel):\n```bash\ncurl -L -o ch-ui https://github.com/caioricciuti/ch-ui/releases/latest/download/ch-ui-darwin-amd64\nchmod +x ch-ui\n```\n\nOptional — verify checksum:\n```bash\ncurl -L -o checksums.txt https://github.com/caioricciuti/ch-ui/releases/latest/download/checksums.txt\nsha256sum -c checksums.txt --ignore-missing\n```\n\n### 2) Run\n\n```bash\nsudo install -m 755 ch-ui /usr/local/bin/ch-ui\nch-ui\n```\n\nOr just `./ch-ui` from the download folder.\n\nOpen 
`http://localhost:3488` and log in with your ClickHouse credentials.\n\n---\n\n## Quick Start (Docker)\n\n```bash\ndocker run --rm \\\n  -p 3488:3488 \\\n  -v ch-ui-data:/app/data \\\n  -e CLICKHOUSE_URL=http://host.docker.internal:8123 \\\n  ghcr.io/caioricciuti/ch-ui:latest\n```\n\n- On Linux, replace `host.docker.internal` with a host/IP reachable from the container.\n- Persisted state is stored in `/app/data/ch-ui.db` (volume: `ch-ui-data`).\n\n---\n\n## Architecture\n\nCH-UI ships as a single binary with two operating modes:\n- **`server`** — web app + API + WebSocket tunnel gateway (default)\n- **`connect`** — lightweight agent that exposes local ClickHouse over secure WebSocket\n\n```mermaid\nflowchart LR\n    U[\"Browser\"] --> S[\"CH-UI Server\\n(UI + API + Gateway)\"]\n    S <--> DB[\"SQLite\\n(state, settings, chats, dashboards)\"]\n    A[\"ch-ui connect\\n(Agent)\"] <--> S\n    A --> CH[\"ClickHouse\"]\n```\n\nFor local use, the server starts an embedded connector automatically against `localhost:8123`.\n\n**Tech stack:** Go backend (chi v5, SQLite WAL mode), Svelte 5 frontend (TypeScript, Vite, TailwindCSS), embedded at build time.\n\n---\n\n## Remote ClickHouse (Tunnel)\n\nConnect to ClickHouse instances running on other machines using the secure WebSocket tunnel.\n\n**Server (VM2):**\n```bash\nch-ui server --port 3488\n```\n\n**Agent (VM1, where ClickHouse runs):**\n```bash\nch-ui connect --url wss://your-ch-ui-domain/connect --key cht_your_tunnel_token\n```\n\n### Tunnel key management\n\nRun these on the server host:\n\n```bash\nch-ui tunnel create --name \"vm1-clickhouse\"   # Create connection + key\nch-ui tunnel list                              # List all connections\nch-ui tunnel show <connection-id>              # Show token + setup commands\nch-ui tunnel rotate <connection-id>            # Rotate token (old one invalidated)\nch-ui tunnel delete <connection-id>            # Delete connection\n```\n\n- Token can also be generated from the 
Admin UI.\n- Agent only needs outbound access to the server's `/connect` endpoint.\n- Add `--takeover` to replace a stale agent session.\n- Install as OS service: `ch-ui service install --key cht_xxx --url wss://host/connect`\n\nFor full hardening guide: [`docs/production-runbook.md`](docs/production-runbook.md)\n\n---\n\n## CLI Reference\n\n### Quick start commands\n\n```bash\nch-ui                     # Start server (local ClickHouse)\nch-ui server start --detach  # Start in background\nch-ui server status          # Check if running\nch-ui server stop            # Stop server\n```\n\n### Full command map\n\n| Command | Description |\n|---|---|\n| `ch-ui` / `ch-ui server` | Start web app + API + gateway |\n| `ch-ui connect` | Start tunnel agent next to ClickHouse |\n| `ch-ui tunnel create/list/show/rotate/delete` | Manage tunnel keys (server host) |\n| `ch-ui service install/start/stop/status/logs/uninstall` | Manage connector as OS service |\n| `ch-ui update` | Update to latest release |\n| `ch-ui version` | Print version |\n| `ch-ui completion bash/zsh/fish` | Generate shell completions |\n| `ch-ui uninstall` | Remove CH-UI from system |\n\n### Server flags\n\n| Flag | Default | Description |\n|---|---|---|\n| `--port, -p` | `3488` | HTTP port |\n| `--clickhouse-url` | `http://localhost:8123` | Local ClickHouse URL |\n| `--connection-name` | `Local ClickHouse` | Display name for local connection |\n| `--config, -c` | - | Path to `server.yaml` |\n| `--detach` | - | Run in background |\n| `--dev` | - | Development mode (proxy to Vite) |\n\n### Connect flags\n\n| Flag | Default | Description |\n|---|---|---|\n| `--url` | - | WebSocket tunnel URL (`wss://`) |\n| `--key` | - | Tunnel token (`cht_...`) |\n| `--clickhouse-url` | `http://localhost:8123` | Local ClickHouse |\n| `--config, -c` | - | Path to `config.yaml` |\n| `--detach` | - | Run in background |\n| `--takeover` | - | Replace stale agent session |\n\n---\n\n## Configuration\n\nCH-UI works without config 
files. You only need them for production defaults or service-managed startup.\n\n### Config file locations\n\n| File | macOS | Linux |\n|---|---|---|\n| `server.yaml` | `~/.config/ch-ui/server.yaml` | `/etc/ch-ui/server.yaml` |\n| `config.yaml` | `~/.config/ch-ui/config.yaml` | `/etc/ch-ui/config.yaml` |\n\n**Priority:** CLI flags > environment variables > config file > built-in defaults\n\n### Server config\n\n```yaml\nport: 3488\napp_url: https://ch-ui.yourcompany.com\ndatabase_path: /var/lib/ch-ui/ch-ui.db\nclickhouse_url: http://localhost:8123\nconnection_name: Local ClickHouse\napp_secret_key: \"change-this-in-production\"\nallowed_origins:\n  - https://ch-ui.yourcompany.com\n```\n\n| Key | Env var | Default | Description |\n|---|---|---|---|\n| `port` | `PORT` | `3488` | HTTP port |\n| `app_url` | `APP_URL` | `http://localhost:<port>` | Public URL for links and tunnel inference |\n| `database_path` | `DATABASE_PATH` | `./data/ch-ui.db` | SQLite database location |\n| `clickhouse_url` | `CLICKHOUSE_URL` | `http://localhost:8123` | Embedded local connection target |\n| `connection_name` | `CONNECTION_NAME` | `Local ClickHouse` | Display name for local connection |\n| `app_secret_key` | `APP_SECRET_KEY` | auto-generated | Session encryption key |\n| `allowed_origins` | `ALLOWED_ORIGINS` | empty | CORS allowlist (comma-separated in env) |\n| `tunnel_url` | `TUNNEL_URL` | derived from port | Tunnel endpoint advertised to agents |\n\n### Connector config\n\n```yaml\ntunnel_token: \"cht_your_token\"\nclickhouse_url: \"http://127.0.0.1:8123\"\ntunnel_url: \"wss://your-ch-ui-domain/connect\"\n```\n\n| Key | Env var | Default | Description |\n|---|---|---|---|\n| `tunnel_token` | `TUNNEL_TOKEN` | required | Auth key from `ch-ui tunnel create` |\n| `clickhouse_url` | `CLICKHOUSE_URL` | `http://localhost:8123` | Local ClickHouse |\n| `tunnel_url` | `TUNNEL_URL` | `ws://127.0.0.1:3488/connect` | Server gateway endpoint |\n\n### Changing the local ClickHouse 
URL\n\n```bash\n# CLI flag\nch-ui server --clickhouse-url http://127.0.0.1:8123\n\n# Environment variable\nCLICKHOUSE_URL=http://127.0.0.1:8123 ch-ui server\n\n# With custom connection name\nch-ui server --clickhouse-url http://127.0.0.1:8123 --connection-name \"My ClickHouse\"\n```\n\nThe login page also has a **Can't login?** button that shows setup guidance.\n\n---\n\n## Production Checklist\n\n- [ ] Set a strong `APP_SECRET_KEY`\n- [ ] Set `APP_URL` to your public HTTPS URL\n- [ ] Configure `ALLOWED_ORIGINS`\n- [ ] Put CH-UI behind a TLS reverse proxy (Nginx example: [`ch-ui.conf`](ch-ui.conf))\n- [ ] Ensure WebSocket upgrade support for `/connect`\n- [ ] Back up SQLite database regularly\n- [ ] Run connector as OS service on remote hosts\n\n### Backup and restore\n\n```bash\n# Backup\ncp /var/lib/ch-ui/ch-ui.db /var/backups/ch-ui-$(date +%F).db\n\n# Restore — stop server first, then replace the DB file\n```\n\n---\n\n## Troubleshooting\n\n### Port already in use\n\n```bash\nch-ui server status   # Check if already running\nch-ui server stop     # Stop the old process\n```\n\n### Can't log in\n\n- **Authentication failed** — wrong ClickHouse credentials\n- **Connection unavailable** — wrong URL or connector offline\n- **Too many attempts** — wait for retry window; fix URL first if needed\n\nClick **Can't login?** on the login page for guided recovery, or restart with:\n```bash\nch-ui server --clickhouse-url 'http://127.0.0.1:8123'\n```\n\nFull guide: [`docs/cant-login.md`](docs/cant-login.md)\n\n### Connector auth fails (`invalid token`)\n\n- Verify you copied the latest `cht_...` token\n- Check with `ch-ui tunnel list`\n- Rotate with `ch-ui tunnel rotate <connection-id>`\n\n### WebSocket fails behind proxy\n\nYour proxy must forward upgrades on `/connect`:\n- `Upgrade` and `Connection: upgrade` headers\n- Long read/send timeouts\n- Buffering disabled for tunnel path\n\n### Health check\n\n```bash\ncurl http://localhost:3488/health\n```\n\n---\n\n## 
Development\n\nRequirements: Go 1.25+, Bun\n\n```bash\ngit clone https://github.com/caioricciuti/ch-ui.git\ncd ch-ui\nmake build    # Full production build (frontend + Go binary)\n./ch-ui\n```\n\nDev mode (two terminals):\n```bash\nmake dev                              # Terminal 1: Go server\ncd ui && bun install && bun run dev   # Terminal 2: Vite dev server\n```\n\nUseful targets: `make build` | `make test` | `make vet` | `make clean` | `make rebuild`\n\n---\n\n## Upgrade\n\n```bash\nch-ui update\n```\n\nDownloads the latest release for your OS/arch, verifies checksum, and replaces the binary.\n\n---\n\n## Legal\n\n- Core license: [`LICENSE`](LICENSE) (Apache 2.0)\n- Licensing details: [`docs/license.md`](docs/license.md)\n- Terms: [`docs/legal/terms-of-service.md`](docs/legal/terms-of-service.md)\n- Privacy: [`docs/legal/privacy-policy.md`](docs/legal/privacy-policy.md)\n\n---\n\n## Contributing\n\nIssues and PRs are welcome.\n\nWhen contributing, please include:\n- Reproduction steps (for bugs)\n- Expected behavior\n- Migration notes (if schema/API changed)\n- Screenshots (for UI changes)\n\n\n# Gitpod One-Click Demo\n\n## Try it now\n\n[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/caioricciuti/ch-ui)\n\n> Launches a full CH-UI + ClickHouse environment in your browser. No install required.\n> Free tier: 50 hours/month, no credit card - Via Gitpod (https://www.gitpod.io/)"
  },
  {
    "path": "VERSION",
    "content": "v2.0.23"
  },
  {
    "path": "ch-ui.conf",
    "content": "upstream ch-ui {\n    server 127.0.0.1:3488;\n    keepalive 64;\n}\n\n# ─── HTTP → HTTPS redirect ──────────────────────────────────────────────────\nserver {\n    listen 80;\n    listen [::]:80;\n    server_name ch-ui.example.com;\n\n    # Let certbot handle ACME challenges\n    location /.well-known/acme-challenge/ {\n        root /var/www/certbot;\n    }\n\n    location / {\n        return 301 https://$host$request_uri;\n    }\n}\n\n# ─── HTTPS ───────────────────────────────────────────────────────────────────\nserver {\n    listen 443 ssl;\n    listen [::]:443 ssl;\n    server_name ch-ui.example.com;\n\n    # SSL certificates — managed by certbot\n    ssl_certificate /etc/letsencrypt/live/ch-ui.example.com/fullchain.pem;\n    ssl_certificate_key /etc/letsencrypt/live/ch-ui.example.com/privkey.pem;\n    include /etc/letsencrypt/options-ssl-nginx.conf;\n    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;\n\n    # Security headers\n    add_header X-Frame-Options \"SAMEORIGIN\" always;\n    add_header X-Content-Type-Options \"nosniff\" always;\n    add_header X-XSS-Protection \"1; mode=block\" always;\n    add_header Referrer-Policy \"strict-origin-when-cross-origin\" always;\n\n    # Gzip (static assets are already embedded in the Go binary)\n    gzip on;\n    gzip_vary on;\n    gzip_min_length 1024;\n    gzip_proxied any;\n    gzip_types text/plain text/css text/xml text/javascript application/javascript application/json application/xml;\n\n    # Agent tunnel WebSocket — keep alive indefinitely\n    location = /connect {\n        proxy_pass http://ch-ui;\n        proxy_http_version 1.1;\n        proxy_set_header Host $host;\n        proxy_set_header X-Real-IP $remote_addr;\n        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n        proxy_set_header X-Forwarded-Proto $scheme;\n        proxy_set_header Upgrade $http_upgrade;\n        proxy_set_header Connection \"upgrade\";\n        proxy_read_timeout 86400;\n        
proxy_send_timeout 86400;\n        proxy_buffering off;\n    }\n\n    # Health check — no access log noise\n    location = /health {\n        proxy_pass http://ch-ui;\n        proxy_http_version 1.1;\n        proxy_set_header Host $host;\n        access_log off;\n    }\n\n    # Agent binary downloads — large files\n    location /download/ {\n        proxy_pass http://ch-ui;\n        proxy_http_version 1.1;\n        proxy_set_header Host $host;\n        proxy_set_header X-Real-IP $remote_addr;\n        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n        proxy_set_header X-Forwarded-Proto $scheme;\n        proxy_buffering off;\n        proxy_read_timeout 300;\n    }\n\n    # Everything else — API, frontend, install script\n    location / {\n        proxy_pass http://ch-ui;\n        proxy_http_version 1.1;\n        proxy_set_header Host $host;\n        proxy_set_header X-Real-IP $remote_addr;\n        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n        proxy_set_header X-Forwarded-Proto $scheme;\n        proxy_read_timeout 300;\n        proxy_send_timeout 300;\n    }\n}\n"
  },
  {
    "path": "cmd/connect.go",
    "content": "package cmd\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"os\"\n\t\"os/exec\"\n\t\"os/signal\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"strings\"\n\t\"syscall\"\n\n\t\"github.com/caioricciuti/ch-ui/connector\"\n\t\"github.com/caioricciuti/ch-ui/connector/config\"\n\t\"github.com/caioricciuti/ch-ui/connector/service\"\n\t\"github.com/caioricciuti/ch-ui/connector/ui\"\n\t\"github.com/spf13/cobra\"\n)\n\nvar (\n\tconnectURL        string\n\tconnectKey        string\n\tconnectCHURL      string\n\tconnectDetach     bool\n\tconnectTakeover   bool\n\tconnectConfigPath string\n)\n\nvar connectCmd = &cobra.Command{\n\tUse:   \"connect\",\n\tShort: \"Connect to a CH-UI server as a tunnel\",\n\tLong: `Connect this machine's ClickHouse instance to a remote CH-UI server\nvia a secure WebSocket tunnel. Queries executed in the CH-UI dashboard\nwill be forwarded through this tunnel to your local ClickHouse.`,\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tu := ui.New(false, false, false, false)\n\t\tu.Logo(\"\")\n\n\t\t// Build CLI config from flags\n\t\tcliCfg := &config.Config{}\n\t\tif cmd.Flags().Changed(\"key\") {\n\t\t\tcliCfg.Token = connectKey\n\t\t}\n\t\tif cmd.Flags().Changed(\"url\") {\n\t\t\tcliCfg.TunnelURL = connectURL\n\t\t}\n\t\tif cmd.Flags().Changed(\"clickhouse-url\") {\n\t\t\tcliCfg.ClickHouseURL = connectCHURL\n\t\t}\n\t\tcliCfg.Takeover = connectTakeover\n\n\t\tcfg, err := config.Load(connectConfigPath, cliCfg)\n\t\tif err != nil {\n\t\t\tu.Error(\"Configuration error: %v\", err)\n\t\t\tif strings.Contains(strings.ToLower(err.Error()), \"tunnel token is required\") {\n\t\t\t\tu.Info(\"Create a tunnel token on your CH-UI server host with:\")\n\t\t\t\tu.Info(\"  ch-ui tunnel create --name <connection-name>\")\n\t\t\t\tu.Info(\"Then retry connect with --key <token> (or set TUNNEL_TOKEN).\")\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\n\t\tif connectDetach {\n\t\t\tpid, logPath, err := startDetached()\n\t\t\tif err != nil 
{\n\t\t\t\treturn fmt.Errorf(\"failed to start in background: %w\", err)\n\t\t\t}\n\t\t\tu.Success(\"Started in background (PID %d)\", pid)\n\t\t\tif logPath != \"\" {\n\t\t\t\tu.Info(\"Logs: %s\", logPath)\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\n\t\tif !connectTakeover {\n\t\t\tif running, err := service.New().IsRunning(); err == nil && running {\n\t\t\t\tu.Info(\"CH-UI service is already running on this machine\")\n\t\t\t\tu.Info(\"Use 'ch-ui service status' to inspect it\")\n\t\t\t\tu.Info(\"Use 'ch-ui service stop' to stop it before running connect\")\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\n\t\treleasePID, err := acquirePIDLock()\n\t\tif err != nil {\n\t\t\tu.DiagnosticError(ui.ErrorTypeConfig, \"Local host\",\n\t\t\t\terr.Error(),\n\t\t\t\t[]string{\n\t\t\t\t\t\"Check current state with: ch-ui service status\",\n\t\t\t\t\t\"If this is stale, remove it and retry: rm -f \" + pidFilePath(),\n\t\t\t\t},\n\t\t\t)\n\t\t\treturn err\n\t\t}\n\t\tdefer releasePID()\n\n\t\tconn := connector.New(cfg, u)\n\n\t\tsigCh := make(chan os.Signal, 1)\n\t\tsignal.Notify(sigCh, os.Interrupt, syscall.SIGTERM)\n\t\tgo func() {\n\t\t\t<-sigCh\n\t\t\tu.Info(\"Shutting down...\")\n\t\t\tconn.Shutdown()\n\t\t}()\n\n\t\tif err := conn.Run(); err != nil {\n\t\t\tu.Error(\"Connection error: %v\", err)\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t},\n}\n\nfunc init() {\n\tconnectCmd.Flags().StringVar(&connectURL, \"url\", \"\", \"CH-UI server WebSocket URL (ws:// or wss://)\")\n\tconnectCmd.Flags().StringVar(&connectKey, \"key\", \"\", \"Tunnel token (cht_..., create on server with: ch-ui tunnel create --name <name>)\")\n\tconnectCmd.Flags().StringVar(&connectCHURL, \"clickhouse-url\", \"\", \"ClickHouse HTTP URL (default: http://localhost:8123)\")\n\tconnectCmd.Flags().BoolVar(&connectDetach, \"detach\", false, \"Run in background\")\n\tconnectCmd.Flags().BoolVar(&connectTakeover, \"takeover\", false, \"Replace an existing active session\")\n\tconnectCmd.Flags().StringVarP(&connectConfigPath, 
\"config\", \"c\", \"\", \"Path to config file\")\n\trootCmd.AddCommand(connectCmd)\n}\n\n// ── Detach ──────────────────────────────────────────────────────────────────\n\nfunc startDetached() (int, string, error) {\n\texe, err := os.Executable()\n\tif err != nil {\n\t\treturn 0, \"\", err\n\t}\n\texe, err = filepath.EvalSymlinks(exe)\n\tif err != nil {\n\t\treturn 0, \"\", err\n\t}\n\n\targs := sanitizeDetachedArgs(os.Args[1:])\n\tif len(args) == 0 || args[0] != \"connect\" {\n\t\treturn 0, \"\", fmt.Errorf(\"detach must be started from 'connect' command\")\n\t}\n\n\tlogDir := service.GetConfigDir()\n\tif err := os.MkdirAll(logDir, 0755); err != nil {\n\t\treturn 0, \"\", err\n\t}\n\tlogPath := filepath.Join(logDir, \"ch-ui-connect.log\")\n\n\tlogFile, err := os.OpenFile(logPath, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)\n\tif err != nil {\n\t\treturn 0, \"\", err\n\t}\n\n\tcmd := exec.Command(exe, args...)\n\tcmd.Env = append(os.Environ(), \"CHUI_DETACHED=1\")\n\tcmd.Stdout = logFile\n\tcmd.Stderr = logFile\n\tsetProcessDetachedAttr(cmd)\n\n\tif err := cmd.Start(); err != nil {\n\t\tlogFile.Close()\n\t\treturn 0, \"\", err\n\t}\n\t_ = logFile.Close()\n\treturn cmd.Process.Pid, logPath, nil\n}\n\nfunc sanitizeDetachedArgs(in []string) []string {\n\targs := make([]string, 0, len(in))\n\tfor _, a := range in {\n\t\tif a == \"--detach\" || strings.HasPrefix(a, \"--detach=\") {\n\t\t\tcontinue\n\t\t}\n\t\targs = append(args, a)\n\t}\n\treturn args\n}\n\n// ── PID guard ───────────────────────────────────────────────────────────────\n\nfunc pidFilePath() string {\n\treturn filepath.Join(service.GetConfigDir(), \"ch-ui.pid\")\n}\n\nfunc acquirePIDLock() (func(), error) {\n\tpidPath := pidFilePath()\n\tif err := os.MkdirAll(filepath.Dir(pidPath), 0755); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create state dir: %w\", err)\n\t}\n\n\tfor attempts := 0; attempts < 2; attempts++ {\n\t\tf, err := os.OpenFile(pidPath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 
0600)\n\t\tif err == nil {\n\t\t\tpid := os.Getpid()\n\t\t\t_, writeErr := f.WriteString(strconv.Itoa(pid))\n\t\t\tcloseErr := f.Close()\n\t\t\tif writeErr != nil {\n\t\t\t\t_ = os.Remove(pidPath)\n\t\t\t\treturn nil, fmt.Errorf(\"failed to write pid file: %w\", writeErr)\n\t\t\t}\n\t\t\tif closeErr != nil {\n\t\t\t\t_ = os.Remove(pidPath)\n\t\t\t\treturn nil, fmt.Errorf(\"failed to finalize pid file: %w\", closeErr)\n\t\t\t}\n\t\t\treturn func() {\n\t\t\t\tcurrentPID, readErr := readPIDFile(pidPath)\n\t\t\t\tif readErr == nil && currentPID != os.Getpid() {\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\t_ = os.Remove(pidPath)\n\t\t\t}, nil\n\t\t}\n\n\t\tif !errors.Is(err, os.ErrExist) {\n\t\t\treturn nil, fmt.Errorf(\"failed to create pid file: %w\", err)\n\t\t}\n\n\t\texistingPID, readErr := readPIDFile(pidPath)\n\t\tif readErr != nil {\n\t\t\t_ = os.Remove(pidPath)\n\t\t\tcontinue\n\t\t}\n\t\tif isProcessRunning(existingPID) {\n\t\t\treturn nil, fmt.Errorf(\"another ch-ui connect process is already running (PID %d)\", existingPID)\n\t\t}\n\n\t\t_ = os.Remove(pidPath)\n\t}\n\n\treturn nil, fmt.Errorf(\"failed to acquire lock at %s\", pidPath)\n}\n\nfunc readPIDFile(path string) (int, error) {\n\traw, err := os.ReadFile(path)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tpid, err := strconv.Atoi(strings.TrimSpace(string(raw)))\n\tif err != nil || pid <= 0 {\n\t\treturn 0, fmt.Errorf(\"invalid pid file\")\n\t}\n\treturn pid, nil\n}\n\n// ── Helpers ─────────────────────────────────────────────────────────────────\n\nfunc copyFile(src, dst string) error {\n\tin, err := os.Open(src)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer in.Close()\n\tout, err := os.Create(dst)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer out.Close()\n\tif _, err := io.Copy(out, in); err != nil {\n\t\treturn err\n\t}\n\treturn out.Close()\n}\n\nfunc fileExists(path string) bool {\n\t_, err := os.Stat(path)\n\treturn err == nil\n}\n"
  },
  {
    "path": "cmd/connect_detach_unix.go",
    "content": "//go:build darwin || linux\n\npackage cmd\n\nimport (\n\t\"os/exec\"\n\t\"syscall\"\n)\n\nfunc setProcessDetachedAttr(cmd *exec.Cmd) {\n\tcmd.SysProcAttr = &syscall.SysProcAttr{\n\t\tSetsid: true,\n\t}\n}\n"
  },
  {
    "path": "cmd/connect_detach_windows.go",
    "content": "//go:build windows\n\npackage cmd\n\nimport \"os/exec\"\n\nfunc setProcessDetachedAttr(cmd *exec.Cmd) {\n\t// No-op on windows.\n}\n"
  },
  {
    "path": "cmd/connect_process_unix.go",
    "content": "//go:build darwin || linux\n\npackage cmd\n\nimport (\n\t\"os\"\n\t\"syscall\"\n)\n\nfunc isProcessRunning(pid int) bool {\n\tif pid <= 0 {\n\t\treturn false\n\t}\n\tp, err := os.FindProcess(pid)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn p.Signal(syscall.Signal(0)) == nil\n}\n"
  },
  {
    "path": "cmd/connect_process_windows.go",
    "content": "//go:build windows\n\npackage cmd\n\nfunc isProcessRunning(pid int) bool {\n\treturn pid > 0\n}\n"
  },
  {
    "path": "cmd/root.go",
    "content": "package cmd\n\nimport (\n\t\"bufio\"\n\t\"fmt\"\n\t\"os\"\n\t\"strings\"\n\n\t\"github.com/spf13/cobra\"\n)\n\nvar rootCmd = &cobra.Command{\n\tUse:   \"ch-ui\",\n\tShort: \"CH-UI - ClickHouse UI and management platform\",\n\tLong:  \"CH-UI is a single binary that serves a ClickHouse management platform for local and remote deployments.\",\n}\n\nfunc init() {\n\tloadEnvFile(\".env\")\n}\n\nfunc Execute() {\n\tif len(os.Args) == 1 {\n\t\trootCmd.SetArgs([]string{\"server\"})\n\t}\n\n\tif err := rootCmd.Execute(); err != nil {\n\t\tfmt.Fprintln(os.Stderr, err)\n\t\tos.Exit(1)\n\t}\n}\n\n// loadEnvFile reads a .env file and sets environment variables.\n// Existing env vars are NOT overwritten (real env takes precedence).\n// Silently does nothing if the file doesn't exist.\nfunc loadEnvFile(path string) {\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer f.Close()\n\n\tscanner := bufio.NewScanner(f)\n\tfor scanner.Scan() {\n\t\tline := strings.TrimSpace(scanner.Text())\n\t\tif line == \"\" || strings.HasPrefix(line, \"#\") {\n\t\t\tcontinue\n\t\t}\n\t\tkey, val, ok := strings.Cut(line, \"=\")\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tkey = strings.TrimSpace(key)\n\t\tval = strings.TrimSpace(val)\n\t\t// Strip surrounding quotes\n\t\tif len(val) >= 2 && ((val[0] == '\"' && val[len(val)-1] == '\"') || (val[0] == '\\'' && val[len(val)-1] == '\\'')) {\n\t\t\tval = val[1 : len(val)-1]\n\t\t}\n\t\t// Don't overwrite existing env vars\n\t\tif os.Getenv(key) == \"\" {\n\t\t\tos.Setenv(key, val)\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "cmd/server.go",
    "content": "package cmd\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io/fs\"\n\t\"log/slog\"\n\t\"net\"\n\t\"os\"\n\t\"os/exec\"\n\t\"os/signal\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"strings\"\n\t\"syscall\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/embedded\"\n\t\"github.com/caioricciuti/ch-ui/internal/server\"\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n\t\"github.com/spf13/cobra\"\n)\n\n// FrontendFS holds the embedded frontend filesystem, set by main before Execute().\nvar FrontendFS fs.FS\n\nvar (\n\tserverPort           int\n\tdevMode              bool\n\tserverClickHouse     string\n\tserverConnectionName string\n\tserverDetach         bool\n\tserverConfig         string\n\tserverPIDFile        string\n\tserverStopTimeout    time.Duration\n\trestartDetach        bool\n)\n\nvar serverCmd = &cobra.Command{\n\tUse:   \"server\",\n\tShort: \"Start the CH-UI server\",\n\tLong:  \"Start the CH-UI HTTP server that serves the API, frontend, and tunnel gateway.\",\n\tPersistentPreRun: func(cmd *cobra.Command, args []string) {\n\t\tserverPIDFile = resolvePIDFile(serverPIDFile)\n\t},\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\treturn runServer(cmd)\n\t},\n}\n\nvar serverStartCmd = &cobra.Command{\n\tUse:   \"start\",\n\tShort: \"Start the CH-UI server\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\treturn runServer(cmd)\n\t},\n}\n\nvar serverStopCmd = &cobra.Command{\n\tUse:   \"stop\",\n\tShort: \"Stop the CH-UI server\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tstopped, err := stopServer(serverPIDFile, serverStopTimeout)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif stopped {\n\t\t\tfmt.Println(\"CH-UI server stopped\")\n\t\t}\n\t\treturn nil\n\t},\n}\n\nvar serverStatusCmd = &cobra.Command{\n\tUse:   \"status\",\n\tShort: \"Show CH-UI server 
status\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tpid, running, err := getRunningServerPID(serverPIDFile)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif running {\n\t\t\tfmt.Printf(\"CH-UI server is running (PID %d)\\n\", pid)\n\t\t\tfmt.Printf(\"PID file: %s\\n\", serverPIDFile)\n\t\t\treturn nil\n\t\t}\n\n\t\taddr := fmt.Sprintf(\"127.0.0.1:%d\", serverPort)\n\t\tif isTCPPortOpen(addr) {\n\t\t\tfmt.Printf(\"CH-UI server PID file not found, but port %d is in use.\\n\", serverPort)\n\t\t\tfmt.Printf(\"Another process may be listening on %s.\\n\", addr)\n\t\t\treturn nil\n\t\t}\n\n\t\tfmt.Println(\"CH-UI server is not running\")\n\t\treturn nil\n\t},\n}\n\nvar serverRestartCmd = &cobra.Command{\n\tUse:   \"restart\",\n\tShort: \"Restart the CH-UI server\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t_, err := stopServer(serverPIDFile, serverStopTimeout)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif restartDetach {\n\t\t\tstartArgs := buildServerStartArgs(cmd)\n\t\t\tpid, logPath, err := startDetachedServer(startArgs)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to restart in background: %w\", err)\n\t\t\t}\n\t\t\tfmt.Printf(\"CH-UI server restarted in background (PID %d)\\n\", pid)\n\t\t\tif logPath != \"\" {\n\t\t\t\tfmt.Printf(\"Logs: %s\\n\", logPath)\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\n\t\tserverDetach = false\n\t\treturn runServer(cmd)\n\t},\n}\n\nfunc init() {\n\tpf := serverCmd.PersistentFlags()\n\tpf.IntVarP(&serverPort, \"port\", \"p\", 3488, \"Port to listen on\")\n\tpf.BoolVar(&devMode, \"dev\", false, \"Enable development mode (proxy to Vite)\")\n\tpf.StringVar(&serverClickHouse, \"clickhouse-url\", \"\", \"Local ClickHouse HTTP URL for the embedded connection\")\n\tpf.StringVar(&serverConnectionName, \"connection-name\", \"\", \"Display name for the embedded local connection\")\n\tpf.StringVarP(&serverConfig, \"config\", \"c\", \"\", \"Path to config 
file\")\n\tpf.StringVar(&serverPIDFile, \"pid-file\", \"ch-ui-server.pid\", \"Path to server PID file\")\n\tpf.DurationVar(&serverStopTimeout, \"stop-timeout\", 10*time.Second, \"Graceful stop timeout\")\n\n\tserverCmd.Flags().BoolVar(&serverDetach, \"detach\", false, \"Run server in background\")\n\tserverStartCmd.Flags().BoolVar(&serverDetach, \"detach\", false, \"Run server in background\")\n\n\tserverRestartCmd.Flags().BoolVar(&restartDetach, \"detach\", true, \"Run restarted server in background\")\n\n\tserverCmd.AddCommand(serverStartCmd, serverStopCmd, serverStatusCmd, serverRestartCmd)\n\trootCmd.AddCommand(serverCmd)\n}\n\nfunc runServer(cmd *cobra.Command) error {\n\tif serverDetach {\n\t\tstartArgs := buildServerStartArgs(cmd)\n\t\tpid, logPath, err := startDetachedServer(startArgs)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to start in background: %w\", err)\n\t\t}\n\t\tfmt.Printf(\"CH-UI server started in background (PID %d)\\n\", pid)\n\t\tif logPath != \"\" {\n\t\t\tfmt.Printf(\"Logs: %s\\n\", logPath)\n\t\t}\n\t\treturn nil\n\t}\n\n\tif err := preparePIDFileForStart(serverPIDFile); err != nil {\n\t\treturn err\n\t}\n\tif err := writeServerPIDFile(serverPIDFile, os.Getpid()); err != nil {\n\t\treturn fmt.Errorf(\"failed to write PID file %q: %w\", serverPIDFile, err)\n\t}\n\tdefer cleanupServerPIDFile(serverPIDFile, os.Getpid())\n\n\t// Load configuration\n\tcfg := config.Load(serverConfig)\n\n\t// Override with flags if provided\n\tif cmd.Flags().Changed(\"port\") {\n\t\tcfg.Port = serverPort\n\t}\n\tif cmd.Flags().Changed(\"clickhouse-url\") {\n\t\tcfg.ClickHouseURL = strings.TrimSpace(serverClickHouse)\n\t}\n\tif cmd.Flags().Changed(\"connection-name\") {\n\t\tcfg.ConnectionName = strings.TrimSpace(serverConnectionName)\n\t}\n\t// --dev flag is the authority for dev mode in the server command.\n\t// Without it, always serve the embedded frontend (production mode).\n\tcfg.DevMode = devMode\n\n\t// Setup structured logging\n\tlogLevel := 
slog.LevelInfo\n\tif cfg.DevMode {\n\t\tlogLevel = slog.LevelDebug\n\t}\n\tlogger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: logLevel}))\n\tslog.SetDefault(logger)\n\n\tslog.Info(\"Starting CH-UI server\",\n\t\t\"version\", version.Version,\n\t\t\"port\", cfg.Port,\n\t\t\"dev\", cfg.DevMode,\n\t)\n\n\tsecretSource, err := config.EnsureAppSecretKey(cfg)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to initialize app secret key: %w\", err)\n\t}\n\tif secretSource == config.SecretKeySourceGenerated {\n\t\tslog.Warn(\"APP_SECRET_KEY was not configured; generated a persisted secret key\",\n\t\t\t\"path\", config.AppSecretKeyPath(cfg.DatabasePath))\n\t} else if secretSource == config.SecretKeySourceFile {\n\t\tslog.Info(\"Loaded persisted app secret key\",\n\t\t\t\"path\", config.AppSecretKeyPath(cfg.DatabasePath))\n\t}\n\n\t// Initialize database\n\tdb, err := database.Open(cfg.DatabasePath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to open database: %w\", err)\n\t}\n\tdefer db.Close()\n\n\tslog.Info(\"Database initialized\", \"path\", cfg.DatabasePath)\n\n\t// Load stored license from database\n\tif stored, err := db.GetSetting(\"license_json\"); err == nil && stored != \"\" {\n\t\tcfg.LicenseJSON = stored\n\t\tslog.Info(\"License loaded from database\")\n\t}\n\n\t// Create and start server\n\tsrv := server.New(cfg, db, FrontendFS)\n\n\t// Start embedded agent (connects to local ClickHouse if configured)\n\tea, err := embedded.Start(db, cfg.Port, cfg.ClickHouseURL, cfg.ConnectionName)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to start embedded agent\", \"error\", err)\n\t}\n\n\t// Graceful shutdown\n\tctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)\n\tdefer stop()\n\n\terrCh := make(chan error, 1)\n\tgo func() {\n\t\terrCh <- srv.Start()\n\t}()\n\n\tselect {\n\tcase err := <-errCh:\n\t\tif ea != nil {\n\t\t\tea.Stop()\n\t\t}\n\t\treturn err\n\tcase <-ctx.Done():\n\t\tslog.Info(\"Shutting 
down server...\")\n\t\tif ea != nil {\n\t\t\tea.Stop()\n\t\t}\n\t\treturn srv.Shutdown(context.Background())\n\t}\n}\n\nfunc buildServerStartArgs(cmd *cobra.Command) []string {\n\targs := []string{\"server\"}\n\tif cmd.Flags().Changed(\"port\") {\n\t\targs = append(args, fmt.Sprintf(\"--port=%d\", serverPort))\n\t}\n\tif cmd.Flags().Changed(\"dev\") && devMode {\n\t\targs = append(args, \"--dev\")\n\t}\n\tif cmd.Flags().Changed(\"config\") && strings.TrimSpace(serverConfig) != \"\" {\n\t\targs = append(args, \"--config\", serverConfig)\n\t}\n\tif cmd.Flags().Changed(\"clickhouse-url\") && strings.TrimSpace(serverClickHouse) != \"\" {\n\t\targs = append(args, \"--clickhouse-url\", serverClickHouse)\n\t}\n\tif cmd.Flags().Changed(\"connection-name\") && strings.TrimSpace(serverConnectionName) != \"\" {\n\t\targs = append(args, \"--connection-name\", serverConnectionName)\n\t}\n\t// Always include absolute PID file path so the child process and\n\t// future update/restart commands can reliably locate the PID file\n\t// regardless of the caller's working directory.\n\targs = append(args, \"--pid-file\", serverPIDFile)\n\tif cmd.Flags().Changed(\"stop-timeout\") {\n\t\targs = append(args, fmt.Sprintf(\"--stop-timeout=%s\", serverStopTimeout.String()))\n\t}\n\treturn args\n}\n\nfunc startDetachedServer(args []string) (int, string, error) {\n\texe, err := os.Executable()\n\tif err != nil {\n\t\treturn 0, \"\", err\n\t}\n\texe, err = filepath.EvalSymlinks(exe)\n\tif err != nil {\n\t\treturn 0, \"\", err\n\t}\n\n\tif err := preparePIDFileForStart(serverPIDFile); err != nil {\n\t\treturn 0, \"\", err\n\t}\n\n\tlogPath := filepath.Join(\".\", \"ch-ui-server.log\")\n\tlogFile, err := os.OpenFile(logPath, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)\n\tif err != nil {\n\t\treturn 0, \"\", err\n\t}\n\n\tchild := exec.Command(exe, args...)\n\tchild.Env = append(os.Environ(), \"CHUI_DETACHED=1\")\n\tchild.Stdout = logFile\n\tchild.Stderr = 
logFile\n\tsetProcessDetachedAttr(child)\n\n\tif err := child.Start(); err != nil {\n\t\t_ = logFile.Close()\n\t\treturn 0, \"\", err\n\t}\n\t_ = logFile.Close()\n\n\tabsLog, _ := filepath.Abs(logPath)\n\treturn child.Process.Pid, absLog, nil\n}\n\nfunc stopServer(pidFile string, timeout time.Duration) (bool, error) {\n\tpid, running, err := getRunningServerPID(pidFile)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tif !running {\n\t\taddr := fmt.Sprintf(\"127.0.0.1:%d\", serverPort)\n\t\tif isTCPPortOpen(addr) {\n\t\t\tfmt.Printf(\"CH-UI server PID file not found, but port %d is in use.\\n\", serverPort)\n\t\t\tfmt.Printf(\"This can happen after upgrading from an older build without PID management.\\n\")\n\t\t\tfmt.Printf(\"Stop that process once manually, then start with this build.\\n\")\n\t\t\tfmt.Printf(\"Expected PID file: %s\\n\", pidFile)\n\t\t\treturn false, nil\n\t\t}\n\t\tfmt.Println(\"CH-UI server is not running\")\n\t\treturn false, nil\n\t}\n\n\tproc, err := os.FindProcess(pid)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"failed to locate process %d: %w\", pid, err)\n\t}\n\tif err := proc.Signal(syscall.SIGTERM); err != nil {\n\t\tif !processExists(pid) {\n\t\t\t_ = os.Remove(pidFile)\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, fmt.Errorf(\"failed to stop PID %d: %w\", pid, err)\n\t}\n\n\tdeadline := time.Now().Add(timeout)\n\tfor time.Now().Before(deadline) {\n\t\tif !processExists(pid) {\n\t\t\t_ = os.Remove(pidFile)\n\t\t\treturn true, nil\n\t\t}\n\t\ttime.Sleep(200 * time.Millisecond)\n\t}\n\n\treturn false, fmt.Errorf(\"timeout waiting for PID %d to stop (waited %s)\", pid, timeout.String())\n}\n\nfunc getRunningServerPID(pidFile string) (int, bool, error) {\n\tpid, err := readServerPIDFile(pidFile)\n\tif err != nil {\n\t\tif errors.Is(err, os.ErrNotExist) {\n\t\t\treturn 0, false, nil\n\t\t}\n\t\treturn 0, false, err\n\t}\n\n\tif processExists(pid) {\n\t\treturn pid, true, nil\n\t}\n\n\t_ = os.Remove(pidFile)\n\treturn 0, false, 
nil\n}\n\nfunc preparePIDFileForStart(pidFile string) error {\n\tpid, running, err := getRunningServerPID(pidFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif running {\n\t\treturn fmt.Errorf(\"server already running (PID %d); stop it first with `ch-ui server stop`\", pid)\n\t}\n\treturn nil\n}\n\nfunc writeServerPIDFile(pidFile string, pid int) error {\n\tif strings.TrimSpace(pidFile) == \"\" {\n\t\treturn fmt.Errorf(\"pid file path is empty\")\n\t}\n\tdir := filepath.Dir(pidFile)\n\tif dir != \".\" && dir != \"\" {\n\t\tif err := os.MkdirAll(dir, 0o755); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn os.WriteFile(pidFile, []byte(fmt.Sprintf(\"%d\\n\", pid)), 0o644)\n}\n\nfunc readServerPIDFile(pidFile string) (int, error) {\n\tdata, err := os.ReadFile(pidFile)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\traw := strings.TrimSpace(string(data))\n\tif raw == \"\" {\n\t\treturn 0, fmt.Errorf(\"pid file %q is empty\", pidFile)\n\t}\n\tpid, err := strconv.Atoi(raw)\n\tif err != nil || pid <= 0 {\n\t\treturn 0, fmt.Errorf(\"invalid PID in %q\", pidFile)\n\t}\n\treturn pid, nil\n}\n\nfunc cleanupServerPIDFile(pidFile string, expectedPID int) {\n\tpid, err := readServerPIDFile(pidFile)\n\tif err != nil {\n\t\treturn\n\t}\n\tif pid == expectedPID {\n\t\t_ = os.Remove(pidFile)\n\t}\n}\n\nfunc processExists(pid int) bool {\n\tif pid <= 0 {\n\t\treturn false\n\t}\n\tproc, err := os.FindProcess(pid)\n\tif err != nil {\n\t\treturn false\n\t}\n\terr = proc.Signal(syscall.Signal(0))\n\tif err == nil {\n\t\treturn true\n\t}\n\tif errors.Is(err, syscall.EPERM) {\n\t\treturn true\n\t}\n\tvar sysErr *os.SyscallError\n\tif errors.As(err, &sysErr) && errors.Is(sysErr.Err, syscall.EPERM) {\n\t\treturn true\n\t}\n\treturn false\n}\n\nfunc isTCPPortOpen(addr string) bool {\n\tconn, err := net.DialTimeout(\"tcp\", addr, 400*time.Millisecond)\n\tif err != nil {\n\t\treturn false\n\t}\n\t_ = conn.Close()\n\treturn true\n}\n\n// resolvePIDFile converts a relative PID file path to 
absolute so that\n// server detection works regardless of the caller's working directory.\nfunc resolvePIDFile(pidFile string) string {\n\tif filepath.IsAbs(pidFile) {\n\t\treturn pidFile\n\t}\n\tabs, err := filepath.Abs(pidFile)\n\tif err != nil {\n\t\treturn pidFile\n\t}\n\treturn abs\n}\n"
  },
  {
    "path": "cmd/service.go",
    "content": "package cmd\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\n\t\"github.com/caioricciuti/ch-ui/connector/config\"\n\t\"github.com/caioricciuti/ch-ui/connector/service\"\n\t\"github.com/spf13/cobra\"\n)\n\n// ── service (parent) ────────────────────────────────────────────────────────\n\nvar serviceCmd = &cobra.Command{\n\tUse:   \"service\",\n\tShort: \"Manage CH-UI as a system service\",\n}\n\n// ── service install ─────────────────────────────────────────────────────────\n\nvar (\n\tsvcInstallKey string\n\tsvcInstallURL string\n\tsvcInstallCH  string\n)\n\nvar serviceInstallCmd = &cobra.Command{\n\tUse:   \"install\",\n\tShort: \"Install CH-UI connect as a system service\",\n\tLong: `Install CH-UI as a system service (launchd on macOS, systemd on Linux)\nso it automatically connects to the server on boot.`,\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tsvc := service.New()\n\n\t\tif svc.IsInstalled() {\n\t\t\tfmt.Println(\"Service is already installed\")\n\t\t\tfmt.Println(\"Use 'ch-ui service restart' to restart, or 'ch-ui service uninstall' first\")\n\t\t\treturn nil\n\t\t}\n\n\t\t// Resolve current binary\n\t\tcurrentBin, err := os.Executable()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to determine current binary path: %w\", err)\n\t\t}\n\t\tcurrentBin, err = filepath.EvalSymlinks(currentBin)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to resolve binary path: %w\", err)\n\t\t}\n\n\t\t// Copy binary to service location if needed\n\t\tif currentBin != service.BinaryPath {\n\t\t\tfmt.Printf(\"Copying binary to %s...\\n\", service.BinaryPath)\n\t\t\tif err := copyFile(currentBin, service.BinaryPath); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to copy binary: %w (try: sudo cp %s %s)\", err, currentBin, service.BinaryPath)\n\t\t\t}\n\t\t\tif err := os.Chmod(service.BinaryPath, 0755); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to set binary permissions: %w\", 
err)\n\t\t\t}\n\t\t\tfmt.Printf(\"Binary installed at %s\\n\", service.BinaryPath)\n\t\t}\n\n\t\t// Create config file\n\t\tconfigPath := service.GetConfigPath()\n\t\tif svcInstallKey != \"\" {\n\t\t\tconfigDir := service.GetConfigDir()\n\t\t\tif err := os.MkdirAll(configDir, 0755); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to create config directory: %w\", err)\n\t\t\t}\n\n\t\t\tchURL := svcInstallCH\n\t\t\tif chURL == \"\" {\n\t\t\t\tchURL = config.Defaults.ClickHouseURL\n\t\t\t}\n\t\t\ttURL := svcInstallURL\n\t\t\tif tURL == \"\" {\n\t\t\t\ttURL = config.Defaults.TunnelURL\n\t\t\t}\n\n\t\t\tconfigContent := fmt.Sprintf(`# CH-UI Configuration\ntunnel_token: \"%s\"\nclickhouse_url: \"%s\"\ntunnel_url: \"%s\"\n`, svcInstallKey, chURL, tURL)\n\n\t\t\tif err := os.WriteFile(configPath, []byte(configContent), 0600); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to write config file: %w\", err)\n\t\t\t}\n\t\t\tfmt.Printf(\"Configuration saved to %s\\n\", configPath)\n\t\t} else if !fileExists(configPath) {\n\t\t\treturn fmt.Errorf(\"no config file found at %s and no --key provided\\n\\nUsage:\\n  ch-ui service install --key <token> --url <server-url>\", configPath)\n\t\t}\n\n\t\t// Install the service\n\t\tfmt.Println(\"Installing service...\")\n\t\tif err := svc.Install(configPath); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to install service: %w\", err)\n\t\t}\n\n\t\tfmt.Println(\"Service installed and started\")\n\t\tfmt.Println(\"  Check status: ch-ui service status\")\n\t\tfmt.Println(\"  View logs:    ch-ui service logs -f\")\n\t\treturn nil\n\t},\n}\n\n// ── service uninstall ───────────────────────────────────────────────────────\n\nvar (\n\tsvcUninstallPurge bool\n\tsvcUninstallForce bool\n)\n\nvar serviceUninstallCmd = &cobra.Command{\n\tUse:   \"uninstall\",\n\tShort: \"Uninstall the CH-UI service\",\n\tLong:  \"Stop and remove the system service. 
Use --purge to also remove the binary and config files.\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tsvc := service.New()\n\n\t\tif !svc.IsInstalled() && !svcUninstallForce {\n\t\t\tfmt.Println(\"Service is not installed\")\n\t\t\treturn nil\n\t\t}\n\n\t\tfmt.Println(\"Stopping service...\")\n\t\t_ = svc.Stop()\n\n\t\tfmt.Println(\"Removing service configuration...\")\n\t\tif err := svc.Uninstall(); err != nil {\n\t\t\tif !svcUninstallForce {\n\t\t\t\treturn fmt.Errorf(\"failed to uninstall service: %w\", err)\n\t\t\t}\n\t\t\tfmt.Printf(\"Warning: failed to uninstall service: %v (continuing with --force)\\n\", err)\n\t\t}\n\n\t\tfmt.Println(\"Service uninstalled\")\n\n\t\tif svcUninstallPurge {\n\t\t\tif fileExists(service.BinaryPath) {\n\t\t\t\tfmt.Printf(\"Removing binary %s...\\n\", service.BinaryPath)\n\t\t\t\tif err := os.Remove(service.BinaryPath); err != nil {\n\t\t\t\t\tfmt.Printf(\"Warning: failed to remove binary: %v\\n\", err)\n\t\t\t\t} else {\n\t\t\t\t\tfmt.Println(\"Binary removed\")\n\t\t\t\t}\n\t\t\t}\n\t\t\tconfigDir := service.GetConfigDir()\n\t\t\tif fileExists(configDir) {\n\t\t\t\tfmt.Printf(\"Removing config directory %s...\\n\", configDir)\n\t\t\t\tif err := os.RemoveAll(configDir); err != nil {\n\t\t\t\t\tfmt.Printf(\"Warning: failed to remove config directory: %v\\n\", err)\n\t\t\t\t} else {\n\t\t\t\t\tfmt.Println(\"Configuration removed\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t},\n}\n\n// ── service start/stop/restart/status/logs ──────────────────────────────────\n\nvar serviceStartCmd = &cobra.Command{\n\tUse: \"start\", Short: \"Start the service\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tif err := service.New().Start(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"Service started\")\n\t\treturn nil\n\t},\n}\n\nvar serviceStopCmd = &cobra.Command{\n\tUse: \"stop\", Short: \"Stop the service\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tif err := 
service.New().Stop(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"Service stopped\")\n\t\treturn nil\n\t},\n}\n\nvar serviceRestartCmd = &cobra.Command{\n\tUse: \"restart\", Short: \"Restart the service\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tif err := service.New().Restart(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"Service restarted\")\n\t\treturn nil\n\t},\n}\n\nvar serviceStatusCmd = &cobra.Command{\n\tUse:   \"status\",\n\tShort: \"Show service status\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tsvc := service.New()\n\t\tif !svc.IsInstalled() {\n\t\t\tfmt.Println(\"Service is not installed\")\n\t\t\tfmt.Println(\"Install with: ch-ui service install --key <token> --url <server-url>\")\n\t\t\treturn nil\n\t\t}\n\t\tstatus, err := svc.Status()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to get service status: %w\", err)\n\t\t}\n\t\trunning, _ := svc.IsRunning()\n\t\tfmt.Println()\n\t\tfmt.Printf(\"  Service:    %s\\n\", service.ServiceName)\n\t\tfmt.Printf(\"  Status:     %s\\n\", status)\n\t\tfmt.Printf(\"  Running:    %v\\n\", running)\n\t\tfmt.Printf(\"  Config:     %s\\n\", service.GetConfigPath())\n\t\tif logPath := svc.GetLogPath(); logPath != \"\" {\n\t\t\tfmt.Printf(\"  Logs:       %s\\n\", logPath)\n\t\t}\n\t\tfmt.Printf(\"  Platform:   %s\\n\", svc.Platform())\n\t\tfmt.Println()\n\t\treturn nil\n\t},\n}\n\nvar (\n\tsvcLogsFollow bool\n\tsvcLogsLines  int\n)\n\nvar serviceLogsCmd = &cobra.Command{\n\tUse: \"logs\", Short: \"View service logs\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\treturn service.New().Logs(svcLogsFollow, svcLogsLines)\n\t},\n}\n\n// ── init ────────────────────────────────────────────────────────────────────\n\nfunc init() {\n\t// Install flags\n\tserviceInstallCmd.Flags().StringVar(&svcInstallKey, \"key\", \"\", \"Tunnel token (cht_...)\")\n\tserviceInstallCmd.Flags().StringVar(&svcInstallURL, \"url\", \"\", \"CH-UI server 
WebSocket URL\")\n\tserviceInstallCmd.Flags().StringVar(&svcInstallCH, \"clickhouse-url\", \"\", \"ClickHouse HTTP URL\")\n\n\t// Uninstall flags\n\tserviceUninstallCmd.Flags().BoolVar(&svcUninstallPurge, \"purge\", false, \"Also remove binary and config files\")\n\tserviceUninstallCmd.Flags().BoolVar(&svcUninstallForce, \"force\", false, \"Force uninstall even if errors occur\")\n\n\t// Logs flags\n\tserviceLogsCmd.Flags().BoolVarP(&svcLogsFollow, \"follow\", \"f\", false, \"Follow log output\")\n\tserviceLogsCmd.Flags().IntVarP(&svcLogsLines, \"lines\", \"n\", 50, \"Number of log lines to show\")\n\n\t// Wire up\n\tserviceCmd.AddCommand(serviceInstallCmd, serviceUninstallCmd, serviceStartCmd, serviceStopCmd, serviceRestartCmd, serviceStatusCmd, serviceLogsCmd)\n\trootCmd.AddCommand(serviceCmd)\n}\n"
  },
  {
    "path": "cmd/tunnel.go",
    "content": "package cmd\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"net/url\"\n\t\"os\"\n\t\"strings\"\n\n\tserverconfig \"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/license\"\n\t\"github.com/spf13/cobra\"\n)\n\nvar (\n\ttunnelConfigPath  string\n\ttunnelDBPath      string\n\ttunnelURLOverride string\n\n\ttunnelCreateName string\n\ttunnelShowToken  bool\n\n\ttunnelDeleteForce bool\n)\n\nvar tunnelCmd = &cobra.Command{\n\tUse:   \"tunnel\",\n\tShort: \"Manage tunnel keys for remote ClickHouse agents\",\n\tLong: `Create and manage tunnel connection keys in this CH-UI server database.\nRun these commands on the server host (VM where CH-UI server stores its SQLite DB)\nto bootstrap remote agents from other machines (VM2, VM3, ...).`,\n}\n\nvar tunnelCreateCmd = &cobra.Command{\n\tUse:   \"create\",\n\tShort: \"Create a new tunnel connection and token\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tname := strings.TrimSpace(tunnelCreateName)\n\t\tif name == \"\" {\n\t\t\treturn errors.New(\"connection name is required (use --name)\")\n\t\t}\n\n\t\tdb, cfg, err := openTunnelDB()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer db.Close()\n\n\t\ttoken := license.GenerateTunnelToken()\n\t\tid, err := db.CreateConnection(name, token, false)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"create connection: %w\", err)\n\t\t}\n\n\t\tconn, err := db.GetConnectionByID(id)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"connection created but failed to load: %w\", err)\n\t\t}\n\t\tif conn == nil {\n\t\t\treturn errors.New(\"connection created but failed to load: not found\")\n\t\t}\n\n\t\tprintTunnelConnectionInfo(cfg, *conn)\n\t\treturn nil\n\t},\n}\n\nvar tunnelListCmd = &cobra.Command{\n\tUse:   \"list\",\n\tShort: \"List tunnel connections\",\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tdb, _, err := openTunnelDB()\n\t\tif err != 
nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer db.Close()\n\n\t\tconns, err := db.GetConnections()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"list connections: %w\", err)\n\t\t}\n\n\t\tif len(conns) == 0 {\n\t\t\tfmt.Println(\"No tunnel connections found.\")\n\t\t\tfmt.Println(\"Create one with: ch-ui tunnel create --name <connection-name>\")\n\t\t\treturn nil\n\t\t}\n\n\t\tfmt.Printf(\"%-36s  %-22s  %-12s  %-8s  %-35s\\n\", \"ID\", \"NAME\", \"STATUS\", \"EMBEDDED\", \"TOKEN\")\n\t\tfor _, c := range conns {\n\t\t\ttoken := maskToken(c.TunnelToken)\n\t\t\tif tunnelShowToken {\n\t\t\t\ttoken = c.TunnelToken\n\t\t\t}\n\t\t\tembedded := \"no\"\n\t\t\tif c.IsEmbedded {\n\t\t\t\tembedded = \"yes\"\n\t\t\t}\n\t\t\tfmt.Printf(\"%-36s  %-22s  %-12s  %-8s  %-35s\\n\",\n\t\t\t\tc.ID,\n\t\t\t\ttruncate(c.Name, 22),\n\t\t\t\tc.Status,\n\t\t\t\tembedded,\n\t\t\t\ttoken,\n\t\t\t)\n\t\t}\n\t\treturn nil\n\t},\n}\n\nvar tunnelShowCmd = &cobra.Command{\n\tUse:   \"show <connection-id>\",\n\tShort: \"Show token and setup instructions for a connection\",\n\tArgs:  cobra.ExactArgs(1),\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tdb, cfg, err := openTunnelDB()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer db.Close()\n\n\t\tconnID := strings.TrimSpace(args[0])\n\t\tconn, err := db.GetConnectionByID(connID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"load connection: %w\", err)\n\t\t}\n\t\tif conn == nil {\n\t\t\treturn fmt.Errorf(\"connection %q not found\", connID)\n\t\t}\n\n\t\tprintTunnelConnectionInfo(cfg, *conn)\n\t\treturn nil\n\t},\n}\n\nvar tunnelRotateCmd = &cobra.Command{\n\tUse:   \"rotate <connection-id>\",\n\tShort: \"Rotate (regenerate) tunnel token for a connection\",\n\tArgs:  cobra.ExactArgs(1),\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tdb, cfg, err := openTunnelDB()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer db.Close()\n\n\t\tconnID := strings.TrimSpace(args[0])\n\t\tconn, err := 
db.GetConnectionByID(connID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"load connection: %w\", err)\n\t\t}\n\t\tif conn == nil {\n\t\t\treturn fmt.Errorf(\"connection %q not found\", connID)\n\t\t}\n\n\t\tnewToken := license.GenerateTunnelToken()\n\t\tif err := db.UpdateConnectionToken(connID, newToken); err != nil {\n\t\t\treturn fmt.Errorf(\"rotate token: %w\", err)\n\t\t}\n\n\t\tupdated, err := db.GetConnectionByID(connID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"token rotated but failed to reload connection: %w\", err)\n\t\t}\n\t\tif updated == nil {\n\t\t\treturn errors.New(\"token rotated but failed to reload connection: not found\")\n\t\t}\n\n\t\tfmt.Println(\"Token rotated successfully. Previous token is now invalid.\")\n\t\tprintTunnelConnectionInfo(cfg, *updated)\n\t\treturn nil\n\t},\n}\n\nvar tunnelDeleteCmd = &cobra.Command{\n\tUse:   \"delete <connection-id>\",\n\tShort: \"Delete a tunnel connection\",\n\tArgs:  cobra.ExactArgs(1),\n\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\tdb, _, err := openTunnelDB()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer db.Close()\n\n\t\tconnID := strings.TrimSpace(args[0])\n\t\tconn, err := db.GetConnectionByID(connID)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"load connection: %w\", err)\n\t\t}\n\t\tif conn == nil {\n\t\t\treturn fmt.Errorf(\"connection %q not found\", connID)\n\t\t}\n\t\tif conn.IsEmbedded && !tunnelDeleteForce {\n\t\t\treturn errors.New(\"refusing to delete embedded connection without --force\")\n\t\t}\n\n\t\tif err := db.DeleteConnection(connID); err != nil {\n\t\t\treturn fmt.Errorf(\"delete connection: %w\", err)\n\t\t}\n\n\t\tfmt.Printf(\"Deleted connection %q (%s)\\n\", conn.Name, conn.ID)\n\t\treturn nil\n\t},\n}\n\nfunc init() {\n\ttunnelCmd.PersistentFlags().StringVarP(&tunnelConfigPath, \"config\", \"c\", \"\", \"Path to server config file\")\n\ttunnelCmd.PersistentFlags().StringVar(&tunnelDBPath, \"db\", \"\", \"Override SQLite database 
path\")\n\ttunnelCmd.PersistentFlags().StringVar(&tunnelURLOverride, \"url\", \"\", \"Public tunnel URL (ws:// or wss://) for setup output\")\n\n\ttunnelCreateCmd.Flags().StringVar(&tunnelCreateName, \"name\", \"\", \"Connection name (e.g. VM2 ClickHouse)\")\n\t_ = tunnelCreateCmd.MarkFlagRequired(\"name\")\n\n\ttunnelListCmd.Flags().BoolVar(&tunnelShowToken, \"show-token\", false, \"Show full tunnel tokens\")\n\n\ttunnelDeleteCmd.Flags().BoolVar(&tunnelDeleteForce, \"force\", false, \"Force delete embedded connection\")\n\n\ttunnelCmd.AddCommand(tunnelCreateCmd, tunnelListCmd, tunnelShowCmd, tunnelRotateCmd, tunnelDeleteCmd)\n\trootCmd.AddCommand(tunnelCmd)\n}\n\nfunc openTunnelDB() (*database.DB, *serverconfig.Config, error) {\n\tcfg := serverconfig.Load(tunnelConfigPath)\n\tif strings.TrimSpace(tunnelDBPath) != \"\" {\n\t\tcfg.DatabasePath = strings.TrimSpace(tunnelDBPath)\n\t}\n\n\tdb, err := database.Open(cfg.DatabasePath)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"open database %q: %w\", cfg.DatabasePath, err)\n\t}\n\treturn db, cfg, nil\n}\n\nfunc printTunnelConnectionInfo(cfg *serverconfig.Config, conn database.Connection) {\n\ttunnelURL := inferPublicTunnelURL(cfg)\n\ttoken := conn.TunnelToken\n\n\tconnectCmd := fmt.Sprintf(\"ch-ui connect --url %s --key %s --clickhouse-url http://localhost:8123\", tunnelURL, token)\n\tserviceCmd := fmt.Sprintf(\"ch-ui service install --url %s --key %s --clickhouse-url http://localhost:8123\", tunnelURL, token)\n\n\tfmt.Println()\n\tfmt.Printf(\"Connection:         %s\\n\", conn.Name)\n\tfmt.Printf(\"Connection ID:      %s\\n\", conn.ID)\n\tfmt.Printf(\"Tunnel Token:       %s\\n\", token)\n\tfmt.Println()\n\tfmt.Println(\"Use on the ClickHouse host:\")\n\tfmt.Printf(\"  %s\\n\", connectCmd)\n\tfmt.Println()\n\tfmt.Println(\"Run as service on the ClickHouse host:\")\n\tfmt.Printf(\"  %s\\n\", serviceCmd)\n\tfmt.Println()\n\n\tif isLoopbackTunnelURL(tunnelURL) {\n\t\tfmt.Fprintf(os.Stderr, \"Warning: tunnel URL %q 
is loopback/local. Set --url or APP_URL/TUNNEL_URL in server config for remote VM setup.\\n\", tunnelURL)\n\t}\n}\n\nfunc inferPublicTunnelURL(cfg *serverconfig.Config) string {\n\tif strings.TrimSpace(tunnelURLOverride) != \"\" {\n\t\treturn websocketConnectURL(strings.TrimSpace(tunnelURLOverride))\n\t}\n\n\tconfigTunnelURL := strings.TrimSpace(cfg.TunnelURL)\n\tif configTunnelURL != \"\" && !isLoopbackTunnelURL(configTunnelURL) {\n\t\treturn websocketConnectURL(configTunnelURL)\n\t}\n\n\tif appURL := strings.TrimSpace(cfg.AppURL); appURL != \"\" {\n\t\treturn websocketConnectURL(appURL)\n\t}\n\n\tif configTunnelURL != \"\" {\n\t\treturn websocketConnectURL(configTunnelURL)\n\t}\n\n\treturn \"ws://127.0.0.1:3488/connect\"\n}\n\nfunc websocketConnectURL(raw string) string {\n\tu, err := url.Parse(raw)\n\tif err != nil {\n\t\treturn raw\n\t}\n\tswitch strings.ToLower(u.Scheme) {\n\tcase \"http\":\n\t\tu.Scheme = \"ws\"\n\tcase \"https\":\n\t\tu.Scheme = \"wss\"\n\tcase \"ws\", \"wss\":\n\t\t// already websocket scheme\n\tdefault:\n\t\t// keep as-is (can still be validated by caller command later)\n\t}\n\n\tpath := strings.TrimRight(u.Path, \"/\")\n\tif path == \"\" {\n\t\tu.Path = \"/connect\"\n\t} else if !strings.HasSuffix(path, \"/connect\") {\n\t\tu.Path = path + \"/connect\"\n\t}\n\n\tu.RawQuery = \"\"\n\tu.Fragment = \"\"\n\treturn u.String()\n}\n\nfunc isLoopbackTunnelURL(raw string) bool {\n\tu, err := url.Parse(raw)\n\tif err != nil {\n\t\ts := strings.ToLower(raw)\n\t\treturn strings.Contains(s, \"127.0.0.1\") || strings.Contains(s, \"localhost\")\n\t}\n\thost := strings.ToLower(u.Hostname())\n\treturn host == \"127.0.0.1\" || host == \"localhost\" || host == \"::1\"\n}\n\nfunc maskToken(token string) string {\n\tif len(token) <= 12 {\n\t\treturn token\n\t}\n\treturn token[:8] + \"...\" + token[len(token)-4:]\n}\n\nfunc truncate(s string, max int) string {\n\tif max < 4 || len(s) <= max {\n\t\treturn s\n\t}\n\treturn s[:max-3] + \"...\"\n}\n"
  },
  {
    "path": "cmd/uninstall.go",
    "content": "package cmd\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\t\"os/exec\"\n\t\"path/filepath\"\n\t\"runtime\"\n\t\"strings\"\n\t\"syscall\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/connector/service\"\n\tserverconfig \"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/spf13/cobra\"\n)\n\nvar (\n\tuninstallConfigPath string\n\tuninstallDBPath     string\n\tuninstallForce      bool\n\tuninstallPrintOnly  bool\n\tuninstallPIDFiles   []string\n)\n\ntype uninstallPlan struct {\n\tserverConfigPath string\n\tdatabasePath     string\n\tserverPIDFiles   []string\n\tconnectorPIDFile string\n\tcleanupPaths     []string\n}\n\nvar uninstallCmd = &cobra.Command{\n\tUse:   \"uninstall\",\n\tShort: \"Uninstall CH-UI from this machine\",\n\tLong: `Best-effort local uninstall for CH-UI.\nThis command stops services/processes, removes local CH-UI files, and prints manual\ncleanup commands for anything that still requires privileged shell access.`,\n\tRunE: runUninstall,\n}\n\nfunc init() {\n\tuninstallCmd.Flags().StringVarP(&uninstallConfigPath, \"config\", \"c\", \"\", \"Path to server config file (used to locate database)\")\n\tuninstallCmd.Flags().StringVar(&uninstallDBPath, \"db\", \"\", \"Override server SQLite database path\")\n\tuninstallCmd.Flags().BoolVar(&uninstallForce, \"force\", false, \"Continue uninstall even if some steps fail\")\n\tuninstallCmd.Flags().BoolVar(&uninstallPrintOnly, \"print-only\", false, \"Only print cleanup commands without executing uninstall\")\n\tuninstallCmd.Flags().StringSliceVar(&uninstallPIDFiles, \"pid-file\", nil, \"Additional server PID file path to stop/remove (repeatable)\")\n\n\trootCmd.AddCommand(uninstallCmd)\n}\n\nfunc runUninstall(cmd *cobra.Command, args []string) error {\n\tplan := buildUninstallPlan()\n\n\tfmt.Println(\"CH-UI uninstall (best effort)\")\n\tfmt.Printf(\"Server config: %s\\n\", plan.serverConfigPath)\n\tfmt.Printf(\"Database:      %s\\n\", plan.databasePath)\n\n\tif 
uninstallPrintOnly {\n\t\tprintManualUninstallCommands(plan)\n\t\treturn nil\n\t}\n\n\tvar failures []string\n\n\tif err := stopDetachedConnectProcess(plan.connectorPIDFile); err != nil {\n\t\tfailures = append(failures, err.Error())\n\t}\n\n\tfor _, pidFile := range plan.serverPIDFiles {\n\t\tif err := stopServerByPIDFile(pidFile); err != nil {\n\t\t\tfailures = append(failures, err.Error())\n\t\t}\n\t}\n\n\tif err := uninstallConnectorService(); err != nil {\n\t\tfailures = append(failures, err.Error())\n\t}\n\n\tif err := uninstallServerSystemService(); err != nil {\n\t\tfailures = append(failures, err.Error())\n\t}\n\n\tfor _, p := range plan.cleanupPaths {\n\t\tremoved, err := removePathIfExists(p)\n\t\tif err != nil {\n\t\t\tfailures = append(failures, fmt.Sprintf(\"remove %s: %v\", p, err))\n\t\t\tcontinue\n\t\t}\n\t\tif removed {\n\t\t\tfmt.Printf(\"Removed %s\\n\", p)\n\t\t}\n\t}\n\n\tprintManualUninstallCommands(plan)\n\n\tif len(failures) == 0 {\n\t\tfmt.Println(\"Uninstall completed.\")\n\t\treturn nil\n\t}\n\n\tfmt.Println(\"Uninstall completed with warnings:\")\n\tfor _, failure := range failures {\n\t\tfmt.Printf(\"  - %s\\n\", failure)\n\t}\n\n\tif uninstallForce {\n\t\treturn nil\n\t}\n\n\treturn errors.New(\"one or more uninstall steps failed (rerun with --force to continue)\" +\n\t\t\"\\nUse the manual cleanup commands shown above\")\n}\n\nfunc buildUninstallPlan() uninstallPlan {\n\tcfg := serverconfig.Load(uninstallConfigPath)\n\n\tif strings.TrimSpace(uninstallDBPath) != \"\" {\n\t\tcfg.DatabasePath = strings.TrimSpace(uninstallDBPath)\n\t}\n\n\tserverConfigPath := strings.TrimSpace(uninstallConfigPath)\n\tif serverConfigPath == \"\" {\n\t\tserverConfigPath = serverconfig.DefaultServerConfigPath()\n\t}\n\n\tpidFiles := append([]string{\"ch-ui-server.pid\", \"/var/lib/ch-ui/run/ch-ui-server.pid\"}, uninstallPIDFiles...)\n\tpidFiles = uniqueNonEmpty(pidFiles)\n\n\tcleanupPaths := 
[]string{\n\t\tservice.BinaryPath,\n\t\tservice.GetConfigDir(),\n\t\tserverConfigPath,\n\t\tcfg.DatabasePath,\n\t\t\"ch-ui-server.log\",\n\t}\n\tcleanupPaths = append(cleanupPaths, pidFiles...)\n\n\tif runtime.GOOS == \"darwin\" {\n\t\thome, _ := os.UserHomeDir()\n\t\tcleanupPaths = append(cleanupPaths,\n\t\t\tfilepath.Join(home, \"Library\", \"LaunchAgents\", service.ServiceLabel+\".plist\"),\n\t\t\tfilepath.Join(home, \"Library\", \"Logs\", \"ch-ui\"),\n\t\t)\n\t}\n\n\tif runtime.GOOS == \"linux\" {\n\t\tcleanupPaths = append(cleanupPaths,\n\t\t\t\"/etc/systemd/system/ch-ui.service\",\n\t\t\t\"/etc/systemd/system/ch-ui-server.service\",\n\t\t)\n\t}\n\n\tcleanupPaths = uniqueNonEmpty(cleanupPaths)\n\n\treturn uninstallPlan{\n\t\tserverConfigPath: serverConfigPath,\n\t\tdatabasePath:     cfg.DatabasePath,\n\t\tserverPIDFiles:   pidFiles,\n\t\tconnectorPIDFile: filepath.Join(service.GetConfigDir(), \"ch-ui.pid\"),\n\t\tcleanupPaths:     cleanupPaths,\n\t}\n}\n\nfunc stopDetachedConnectProcess(pidFile string) error {\n\tpid, err := readPIDFile(pidFile)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn nil\n\t\t}\n\t\treturn fmt.Errorf(\"read connect pid file %s: %w\", pidFile, err)\n\t}\n\n\tif !processExists(pid) {\n\t\t_ = os.Remove(pidFile)\n\t\treturn nil\n\t}\n\n\tproc, err := os.FindProcess(pid)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"locate connect process %d: %w\", pid, err)\n\t}\n\tif err := proc.Signal(syscall.SIGTERM); err != nil {\n\t\treturn fmt.Errorf(\"stop connect process %d: %w\", pid, err)\n\t}\n\n\tdeadline := time.Now().Add(10 * time.Second)\n\tfor time.Now().Before(deadline) {\n\t\tif !processExists(pid) {\n\t\t\t_ = os.Remove(pidFile)\n\t\t\tfmt.Printf(\"Stopped connect process (PID %d)\\n\", pid)\n\t\t\treturn nil\n\t\t}\n\t\ttime.Sleep(200 * time.Millisecond)\n\t}\n\n\treturn fmt.Errorf(\"timeout waiting for connect process %d to stop\", pid)\n}\n\nfunc stopServerByPIDFile(pidFile string) error {\n\tpid, running, err := 
getRunningServerPID(pidFile)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"inspect server pid file %s: %w\", pidFile, err)\n\t}\n\tif !running {\n\t\treturn nil\n\t}\n\n\tstopped, err := stopServer(pidFile, 10*time.Second)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"stop server process %d from %s: %w\", pid, pidFile, err)\n\t}\n\tif stopped {\n\t\tfmt.Printf(\"Stopped server process (PID %d) from %s\\n\", pid, pidFile)\n\t}\n\treturn nil\n}\n\nfunc uninstallConnectorService() error {\n\tsvc := service.New()\n\tif !svc.IsInstalled() {\n\t\tfmt.Println(\"Connector service is not installed\")\n\t\treturn nil\n\t}\n\n\tfmt.Println(\"Stopping connector service...\")\n\t_ = svc.Stop()\n\n\tfmt.Println(\"Uninstalling connector service...\")\n\tif err := svc.Uninstall(); err != nil {\n\t\treturn fmt.Errorf(\"uninstall connector service: %w\", err)\n\t}\n\n\tfmt.Println(\"Connector service uninstalled\")\n\treturn nil\n}\n\nfunc uninstallServerSystemService() error {\n\tif runtime.GOOS != \"linux\" {\n\t\treturn nil\n\t}\n\n\tvar warnings []string\n\tsteps := [][]string{\n\t\t{\"systemctl\", \"stop\", \"ch-ui-server\"},\n\t\t{\"systemctl\", \"disable\", \"ch-ui-server\"},\n\t\t{\"systemctl\", \"daemon-reload\"},\n\t}\n\n\tfor _, step := range steps {\n\t\tif err := runPrivileged(step[0], step[1:]...); err != nil {\n\t\t\twarnings = append(warnings, fmt.Sprintf(\"%s: %v\", strings.Join(step, \" \"), err))\n\t\t}\n\t}\n\n\tif len(warnings) == 0 {\n\t\treturn nil\n\t}\n\n\treturn errors.New(strings.Join(warnings, \"; \"))\n}\n\nfunc runPrivileged(name string, args ...string) error {\n\tcmdName := name\n\tcmdArgs := args\n\tif runtime.GOOS == \"linux\" && os.Geteuid() != 0 {\n\t\tcmdArgs = append([]string{name}, args...)\n\t\tcmdName = \"sudo\"\n\t}\n\n\tcmd := exec.Command(cmdName, cmdArgs...)\n\tout, err := cmd.CombinedOutput()\n\tif err == nil {\n\t\treturn nil\n\t}\n\n\tmsg := strings.TrimSpace(string(out))\n\tif msg == \"\" {\n\t\treturn err\n\t}\n\treturn fmt.Errorf(\"%w: 
%s\", err, msg)\n}\n\nfunc removePathIfExists(path string) (bool, error) {\n\tpath = strings.TrimSpace(path)\n\tif path == \"\" {\n\t\treturn false, nil\n\t}\n\tif path == \"/\" {\n\t\treturn false, fmt.Errorf(\"refusing to remove root path\")\n\t}\n\n\tinfo, err := os.Stat(path)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, err\n\t}\n\n\tif info.IsDir() {\n\t\tif err := os.RemoveAll(path); err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\treturn true, nil\n\t}\n\n\tif err := os.Remove(path); err != nil {\n\t\treturn false, err\n\t}\n\treturn true, nil\n}\n\nfunc printManualUninstallCommands(plan uninstallPlan) {\n\tfmt.Println()\n\tfmt.Println(\"Manual cleanup commands (run if anything remains):\")\n\n\tfor _, cmd := range manualUninstallCommands(plan) {\n\t\tfmt.Printf(\"  %s\\n\", cmd)\n\t}\n\n\tfmt.Println()\n\tfmt.Println(\"Optional verification:\")\n\tfmt.Println(\"  ch-ui version\")\n\tfmt.Println(\"  ch-ui service status\")\n\tfmt.Println(\"  ch-ui server status\")\n}\n\nfunc manualUninstallCommands(plan uninstallPlan) []string {\n\tcommands := []string{}\n\n\tquotedConfig := shellQuote(plan.serverConfigPath)\n\tquotedDB := shellQuote(plan.databasePath)\n\n\tswitch runtime.GOOS {\n\tcase \"darwin\":\n\t\thome, _ := os.UserHomeDir()\n\t\tlaunchAgent := filepath.Join(home, \"Library\", \"LaunchAgents\", service.ServiceLabel+\".plist\")\n\t\tlogDir := filepath.Join(home, \"Library\", \"Logs\", \"ch-ui\")\n\n\t\tcommands = append(commands,\n\t\t\t\"launchctl unload \"+shellQuote(launchAgent)+\" 2>/dev/null || true\",\n\t\t\t\"rm -f \"+shellQuote(launchAgent),\n\t\t\t\"rm -rf \"+shellQuote(service.GetConfigDir()),\n\t\t\t\"rm -rf \"+shellQuote(logDir),\n\t\t\t\"rm -f \"+shellQuote(service.BinaryPath),\n\t\t\t\"rm -f \"+quotedConfig,\n\t\t\t\"rm -f \"+quotedDB,\n\t\t)\n\tdefault:\n\t\tcommands = append(commands,\n\t\t\t\"sudo systemctl stop ch-ui 2>/dev/null || true\",\n\t\t\t\"sudo systemctl disable ch-ui 
2>/dev/null || true\",\n\t\t\t\"sudo rm -f /etc/systemd/system/ch-ui.service\",\n\t\t\t\"sudo systemctl stop ch-ui-server 2>/dev/null || true\",\n\t\t\t\"sudo systemctl disable ch-ui-server 2>/dev/null || true\",\n\t\t\t\"sudo rm -f /etc/systemd/system/ch-ui-server.service\",\n\t\t\t\"sudo systemctl daemon-reload\",\n\t\t\t\"sudo rm -rf \"+shellQuote(service.GetConfigDir()),\n\t\t\t\"sudo rm -f \"+shellQuote(service.BinaryPath),\n\t\t\t\"sudo rm -f \"+quotedConfig,\n\t\t\t\"sudo rm -f \"+quotedDB,\n\t\t)\n\t}\n\n\tif len(plan.serverPIDFiles) > 0 {\n\t\tvar quoted []string\n\t\tfor _, p := range plan.serverPIDFiles {\n\t\t\tquoted = append(quoted, shellQuote(p))\n\t\t}\n\t\tcommands = append(commands, \"rm -f \"+strings.Join(quoted, \" \"))\n\t}\n\tcommands = append(commands,\n\t\t\"rm -f \"+shellQuote(\"ch-ui-server.log\"),\n\t)\n\n\treturn commands\n}\n\nfunc shellQuote(s string) string {\n\treturn \"'\" + strings.ReplaceAll(s, \"'\", \"'\\\\''\") + \"'\"\n}\n\nfunc uniqueNonEmpty(in []string) []string {\n\tseen := make(map[string]struct{}, len(in))\n\tout := make([]string, 0, len(in))\n\tfor _, raw := range in {\n\t\tp := strings.TrimSpace(raw)\n\t\tif p == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif _, ok := seen[p]; ok {\n\t\t\tcontinue\n\t\t}\n\t\tseen[p] = struct{}{}\n\t\tout = append(out, p)\n\t}\n\treturn out\n}\n"
  },
  {
    "path": "cmd/update.go",
    "content": "package cmd\n\nimport (\n\t\"crypto/sha256\"\n\t\"encoding/hex\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"runtime\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n\t\"github.com/spf13/cobra\"\n)\n\nconst (\n\treleasesURL = \"https://api.github.com/repos/caioricciuti/ch-ui/releases/latest\"\n)\n\ntype ghRelease struct {\n\tTagName string    `json:\"tag_name\"`\n\tAssets  []ghAsset `json:\"assets\"`\n}\n\ntype ghAsset struct {\n\tName               string `json:\"name\"`\n\tBrowserDownloadURL string `json:\"browser_download_url\"`\n}\n\nvar updateCmd = &cobra.Command{\n\tUse:   \"update\",\n\tShort: \"Update CH-UI to the latest version\",\n\tLong:  \"Download the latest CH-UI release from GitHub and replace the current binary.\",\n\tRunE:  runUpdate,\n}\n\nvar (\n\tupdateRestartServer bool\n\tupdatePIDFile       string\n\tupdateStopTimeout   time.Duration\n)\n\nfunc init() {\n\tupdateCmd.Flags().BoolVar(&updateRestartServer, \"restart-server\", true, \"Automatically restart a running CH-UI server after update\")\n\tupdateCmd.Flags().StringVar(&updatePIDFile, \"pid-file\", \"ch-ui-server.pid\", \"Server PID file path used to detect/restart a running server\")\n\tupdateCmd.Flags().DurationVar(&updateStopTimeout, \"stop-timeout\", 10*time.Second, \"Graceful stop timeout used when restarting after update\")\n\trootCmd.AddCommand(updateCmd)\n}\n\nfunc runUpdate(cmd *cobra.Command, args []string) error {\n\t// Resolve PID file to absolute path so we can detect the running\n\t// server regardless of the caller's working directory.\n\tupdatePIDFile = resolvePIDFile(updatePIDFile)\n\n\t// Resolve current binary path\n\tcurrentBin, err := os.Executable()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to determine current binary path: %w\", err)\n\t}\n\tcurrentBin, err = filepath.EvalSymlinks(currentBin)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to resolve 
binary path: %w\", err)\n\t}\n\n\tvar runningPID int\n\tvar running bool\n\trestartArgs := []string{\"server\", \"--pid-file\", updatePIDFile}\n\tif updateRestartServer {\n\t\trunningPID, running, err = getRunningServerPID(updatePIDFile)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to inspect server status via PID file %q: %w\", updatePIDFile, err)\n\t\t}\n\t\tif running {\n\t\t\trestartArgs = detectServerRestartArgs(runningPID, updatePIDFile)\n\t\t\tfmt.Printf(\"Detected running CH-UI server (PID %d); it will be restarted after update.\\n\", runningPID)\n\t\t}\n\t}\n\n\t// Check write permissions\n\tdir := filepath.Dir(currentBin)\n\tif err := checkWritable(dir); err != nil {\n\t\treturn fmt.Errorf(\"cannot write to %s: %w (try running with sudo)\", dir, err)\n\t}\n\n\tfmt.Printf(\"Current version: %s\\n\", version.Version)\n\tfmt.Println(\"Checking for updates...\")\n\n\t// Fetch latest release info\n\trelease, err := fetchLatestRelease()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to check for updates: %w\", err)\n\t}\n\n\tlatestTag := release.TagName\n\tif latestTag == version.Version {\n\t\tfmt.Printf(\"Already up to date (%s)\\n\", version.Version)\n\t\treturn nil\n\t}\n\n\tfmt.Printf(\"New version available: %s → %s\\n\", version.Version, latestTag)\n\n\t// Find the right asset for this platform\n\tassetName := fmt.Sprintf(\"ch-ui-%s-%s\", runtime.GOOS, runtime.GOARCH)\n\tvar assetURL string\n\tvar checksumsURL string\n\tfor _, a := range release.Assets {\n\t\tif a.Name == assetName {\n\t\t\tassetURL = a.BrowserDownloadURL\n\t\t}\n\t\tif a.Name == \"checksums.txt\" {\n\t\t\tchecksumsURL = a.BrowserDownloadURL\n\t\t}\n\t}\n\tif assetURL == \"\" {\n\t\treturn fmt.Errorf(\"no release asset found for %s/%s (expected %s)\", runtime.GOOS, runtime.GOARCH, assetName)\n\t}\n\n\t// Download checksums\n\tvar expectedHash string\n\tif checksumsURL != \"\" {\n\t\texpectedHash, err = fetchExpectedChecksum(checksumsURL, assetName)\n\t\tif err != nil 
{\n\t\t\tfmt.Printf(\"Warning: could not verify checksum: %v\\n\", err)\n\t\t}\n\t}\n\n\t// Download binary to temp file in the same directory (for atomic rename)\n\ttmpPath := currentBin + \".update-tmp\"\n\tfmt.Printf(\"Downloading %s...\\n\", assetName)\n\tif err := downloadFile(assetURL, tmpPath); err != nil {\n\t\tos.Remove(tmpPath)\n\t\treturn fmt.Errorf(\"failed to download update: %w\", err)\n\t}\n\n\t// Verify checksum\n\tif expectedHash != \"\" {\n\t\tactualHash, err := fileSHA256(tmpPath)\n\t\tif err != nil {\n\t\t\tos.Remove(tmpPath)\n\t\t\treturn fmt.Errorf(\"failed to compute checksum: %w\", err)\n\t\t}\n\t\tif actualHash != expectedHash {\n\t\t\tos.Remove(tmpPath)\n\t\t\treturn fmt.Errorf(\"checksum mismatch: expected %s, got %s\", expectedHash, actualHash)\n\t\t}\n\t\tfmt.Println(\"Checksum verified ✓\")\n\t}\n\n\t// Make executable\n\tif err := os.Chmod(tmpPath, 0755); err != nil {\n\t\tos.Remove(tmpPath)\n\t\treturn fmt.Errorf(\"failed to set permissions: %w\", err)\n\t}\n\n\t// Atomic replace\n\tif err := os.Rename(tmpPath, currentBin); err != nil {\n\t\tos.Remove(tmpPath)\n\t\treturn fmt.Errorf(\"failed to replace binary: %w\", err)\n\t}\n\n\tfmt.Printf(\"Updated successfully: %s → %s\\n\", version.Version, latestTag)\n\n\tif !updateRestartServer || !running {\n\t\tfmt.Println(\"Restart CH-UI to use the new version.\")\n\t\treturn nil\n\t}\n\n\tfmt.Printf(\"Restarting CH-UI server (PID %d)...\\n\", runningPID)\n\tstopped, err := stopServer(updatePIDFile, updateStopTimeout)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"binary updated to %s but failed to stop running server: %w\", latestTag, err)\n\t}\n\tif !stopped {\n\t\treturn fmt.Errorf(\"binary updated to %s but could not confirm server stop; run `ch-ui server restart --detach --pid-file %s`\", latestTag, updatePIDFile)\n\t}\n\n\tprevPIDFile := serverPIDFile\n\tserverPIDFile = updatePIDFile\n\tpid, logPath, err := startDetachedServer(restartArgs)\n\tserverPIDFile = prevPIDFile\n\tif err != nil 
{\n\t\treturn fmt.Errorf(\"binary updated to %s and server stopped, but failed to start it again: %w\", latestTag, err)\n\t}\n\n\tfmt.Printf(\"CH-UI server restarted in background (PID %d)\\n\", pid)\n\tif logPath != \"\" {\n\t\tfmt.Printf(\"Logs: %s\\n\", logPath)\n\t}\n\tfmt.Println(\"Update complete and running the new version.\")\n\treturn nil\n}\n\nfunc detectServerRestartArgs(pid int, pidFile string) []string {\n\targs, err := readProcessArgs(pid)\n\tif err != nil {\n\t\treturn []string{\"server\", \"--pid-file\", pidFile}\n\t}\n\tsanitized := sanitizeServerStartArgs(args, pidFile)\n\tif len(sanitized) == 0 {\n\t\treturn []string{\"server\", \"--pid-file\", pidFile}\n\t}\n\treturn sanitized\n}\n\nfunc readProcessArgs(pid int) ([]string, error) {\n\tif runtime.GOOS != \"linux\" {\n\t\treturn nil, fmt.Errorf(\"unsupported OS for process args inspection: %s\", runtime.GOOS)\n\t}\n\tdata, err := os.ReadFile(fmt.Sprintf(\"/proc/%d/cmdline\", pid))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tparts := strings.Split(string(data), \"\\x00\")\n\tif len(parts) == 0 {\n\t\treturn nil, fmt.Errorf(\"empty cmdline for PID %d\", pid)\n\t}\n\tout := make([]string, 0, len(parts))\n\tfor i, part := range parts {\n\t\tif i == 0 {\n\t\t\tcontinue // executable path\n\t\t}\n\t\tif strings.TrimSpace(part) == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tout = append(out, part)\n\t}\n\treturn out, nil\n}\n\nfunc sanitizeServerStartArgs(args []string, pidFile string) []string {\n\t// Safe fallback that keeps behavior predictable.\n\tout := []string{\"server\"}\n\n\t// Expect args from the running server process to start with \"server\"\n\t// (or \"server start\" in older/manual invocations).\n\ti := 0\n\tif len(args) > 0 && args[0] == \"server\" {\n\t\ti = 1\n\t\tif i < len(args) && args[i] == \"start\" {\n\t\t\ti++\n\t\t}\n\t}\n\n\tfor i < len(args) {\n\t\ta := args[i]\n\n\t\tswitch {\n\t\tcase a == \"server\" || a == \"start\" || a == \"stop\" || a == \"status\" || a == 
\"restart\":\n\t\t\ti++\n\t\tcase a == \"--detach\" || a == \"-h\" || a == \"--help\":\n\t\t\ti++\n\t\tcase a == \"--dev\":\n\t\t\tout = append(out, a)\n\t\t\ti++\n\t\tcase a == \"--port\" || a == \"-p\" ||\n\t\t\ta == \"--config\" || a == \"-c\" ||\n\t\t\ta == \"--clickhouse-url\" ||\n\t\t\ta == \"--connection-name\" ||\n\t\t\ta == \"--stop-timeout\":\n\t\t\tif i+1 < len(args) {\n\t\t\t\tout = append(out, a, args[i+1])\n\t\t\t\ti += 2\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ti++\n\t\tcase a == \"--pid-file\":\n\t\t\tif i+1 < len(args) {\n\t\t\t\tout = append(out, a, resolvePIDFile(args[i+1]))\n\t\t\t\ti += 2\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\ti++\n\t\tcase strings.HasPrefix(a, \"--port=\") ||\n\t\t\tstrings.HasPrefix(a, \"--config=\") ||\n\t\t\tstrings.HasPrefix(a, \"--clickhouse-url=\") ||\n\t\t\tstrings.HasPrefix(a, \"--connection-name=\") ||\n\t\t\tstrings.HasPrefix(a, \"--stop-timeout=\"):\n\t\t\tout = append(out, a)\n\t\t\ti++\n\t\tcase strings.HasPrefix(a, \"--pid-file=\"):\n\t\t\tval := strings.TrimPrefix(a, \"--pid-file=\")\n\t\t\tout = append(out, \"--pid-file=\"+resolvePIDFile(val))\n\t\t\ti++\n\t\tdefault:\n\t\t\ti++\n\t\t}\n\t}\n\n\tif !hasFlag(out, \"--pid-file\") {\n\t\tout = append(out, \"--pid-file\", pidFile)\n\t}\n\treturn out\n}\n\nfunc hasFlag(args []string, longName string) bool {\n\tfor _, a := range args {\n\t\tif a == longName || strings.HasPrefix(a, longName+\"=\") {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\nfunc fetchLatestRelease() (*ghRelease, error) {\n\tresp, err := http.Get(releasesURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, fmt.Errorf(\"GitHub API returned %s\", resp.Status)\n\t}\n\n\tvar release ghRelease\n\tif err := json.NewDecoder(resp.Body).Decode(&release); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse release info: %w\", err)\n\t}\n\treturn &release, nil\n}\n\nfunc fetchExpectedChecksum(url, assetName string) (string, 
error) {\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer resp.Body.Close()\n\n\tbody, err := io.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tfor _, line := range strings.Split(string(body), \"\\n\") {\n\t\tparts := strings.Fields(line)\n\t\tif len(parts) == 2 && parts[1] == assetName {\n\t\t\treturn parts[0], nil\n\t\t}\n\t}\n\treturn \"\", fmt.Errorf(\"checksum not found for %s\", assetName)\n}\n\nfunc downloadFile(url, dest string) error {\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn fmt.Errorf(\"download returned %s\", resp.Status)\n\t}\n\n\tf, err := os.Create(dest)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\n\t_, err = io.Copy(f, resp.Body)\n\treturn err\n}\n\nfunc fileSHA256(path string) (string, error) {\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer f.Close()\n\n\th := sha256.New()\n\tif _, err := io.Copy(h, f); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn hex.EncodeToString(h.Sum(nil)), nil\n}\n\nfunc checkWritable(dir string) error {\n\ttmp := filepath.Join(dir, \".ch-ui-update-check\")\n\tf, err := os.Create(tmp)\n\tif err != nil {\n\t\treturn err\n\t}\n\tf.Close()\n\treturn os.Remove(tmp)\n}\n"
  },
  {
    "path": "cmd/version.go",
    "content": "package cmd\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n\t\"github.com/spf13/cobra\"\n)\n\nvar versionCmd = &cobra.Command{\n\tUse:   \"version\",\n\tShort: \"Print version information\",\n\tRun: func(cmd *cobra.Command, args []string) {\n\t\tfmt.Printf(\"ch-ui %s (commit: %s, built: %s)\\n\", version.Version, version.Commit, version.BuildDate)\n\t},\n}\n\nfunc init() {\n\trootCmd.AddCommand(versionCmd)\n}\n"
  },
  {
    "path": "connector/clickhouse.go",
    "content": "package connector\n\nimport (\n\t\"bufio\"\n\t\"context\"\n\t\"crypto/tls\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"regexp\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n// CHClient handles ClickHouse query execution\ntype CHClient struct {\n\tbaseURL   string\n\ttransport *http.Transport\n\thttpClient *http.Client\n}\n\n// NewCHClient creates a new ClickHouse HTTP client\nfunc NewCHClient(baseURL string, insecureSkipVerify bool) *CHClient {\n\ttransport := &http.Transport{\n\t\tDialContext: (&net.Dialer{\n\t\t\tTimeout:   30 * time.Second,\n\t\t\tKeepAlive: 30 * time.Second,\n\t\t}).DialContext,\n\t\tTLSClientConfig:       &tls.Config{InsecureSkipVerify: insecureSkipVerify},\n\t\tMaxIdleConns:          100,\n\t\tMaxIdleConnsPerHost:   10,\n\t\tIdleConnTimeout:       90 * time.Second,\n\t\tTLSHandshakeTimeout:   10 * time.Second,\n\t\tExpectContinueTimeout: 1 * time.Second,\n\t\tResponseHeaderTimeout: 5 * time.Minute,\n\t\tDisableKeepAlives:     false,\n\t\tForceAttemptHTTP2:     true,\n\t}\n\n\treturn &CHClient{\n\t\tbaseURL:   strings.TrimSuffix(baseURL, \"/\"),\n\t\ttransport: transport,\n\t\thttpClient: &http.Client{\n\t\t\tTransport: transport,\n\t\t\tTimeout:   5 * time.Minute,\n\t\t},\n\t}\n}\n\n// QueryResult holds the result of a query execution\ntype QueryResult struct {\n\tData       []map[string]interface{} `json:\"data\"`\n\tMeta       []ColumnMeta             `json:\"meta\"`\n\tRows       int                      `json:\"rows\"`\n\tStatistics struct {\n\t\tElapsed   float64 `json:\"elapsed\"`\n\t\tRowsRead  uint64  `json:\"rows_read\"`\n\t\tBytesRead uint64  `json:\"bytes_read\"`\n\t} `json:\"statistics\"`\n}\n\n// ColumnMeta describes a column in the result\ntype ColumnMeta struct {\n\tName string `json:\"name\"`\n\tType string `json:\"type\"`\n}\n\n// isTransientError checks if an error is a transient connection error that\n// should be retried (e.g. 
server closed an idle keep-alive connection).\nfunc isTransientError(err error) bool {\n\tif err == nil {\n\t\treturn false\n\t}\n\ts := err.Error()\n\tif strings.Contains(s, \"unexpected EOF\") ||\n\t\tstrings.Contains(s, \"connection reset by peer\") ||\n\t\tstrings.Contains(s, \"transport connection broken\") ||\n\t\tstrings.Contains(s, \"use of closed network connection\") {\n\t\treturn true\n\t}\n\tvar netErr net.Error\n\tif errors.As(err, &netErr) && netErr.Timeout() {\n\t\treturn false // real timeouts should not be retried\n\t}\n\treturn false\n}\n\n// doWithRetry executes an HTTP request, retrying once on transient connection errors.\nfunc (c *CHClient) doWithRetry(req *http.Request, client *http.Client) (*http.Response, error) {\n\tresp, err := client.Do(req)\n\tif err != nil && isTransientError(err) {\n\t\t// Close any idle connections that may be stale, then retry once.\n\t\tc.transport.CloseIdleConnections()\n\n\t\t// Clone the request for retry (the body must be re-readable).\n\t\tretryReq := req.Clone(req.Context())\n\t\tif req.GetBody != nil {\n\t\t\tbody, bodyErr := req.GetBody()\n\t\t\tif bodyErr != nil {\n\t\t\t\treturn nil, err // return original error\n\t\t\t}\n\t\t\tretryReq.Body = body\n\t\t}\n\t\treturn client.Do(retryReq)\n\t}\n\treturn resp, err\n}\n\n// Execute runs a query against ClickHouse\nfunc (c *CHClient) Execute(ctx context.Context, query, user, password string) (*QueryResult, error) {\n\t// Determine if this is a read or write query\n\tisWrite := isWriteQuery(query)\n\thasFormat := hasFormatClause(query)\n\n\t// Build URL with parameters\n\tparams := url.Values{}\n\tparams.Set(\"default_format\", \"JSON\")\n\n\t// For read queries without explicit FORMAT, add FORMAT JSON\n\tfinalQuery := query\n\tif !isWrite && !hasFormat {\n\t\tfinalQuery = strings.TrimRight(query, \"; \\n\\t\") + \" FORMAT JSON\"\n\t}\n\n\tfullURL := c.baseURL + \"/?\" + params.Encode()\n\n\t// Create request\n\treq, err := http.NewRequestWithContext(ctx, 
\"POST\", fullURL, strings.NewReader(finalQuery))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create request: %w\", err)\n\t}\n\n\t// Set auth if provided\n\tif user != \"\" {\n\t\treq.SetBasicAuth(user, password)\n\t}\n\n\treq.Header.Set(\"Content-Type\", \"text/plain\")\n\n\t// GetBody allows doWithRetry to re-create the body on retry\n\tbodyStr := finalQuery\n\treq.GetBody = func() (io.ReadCloser, error) {\n\t\treturn io.NopCloser(strings.NewReader(bodyStr)), nil\n\t}\n\n\t// Execute with retry on transient connection errors\n\tresp, err := c.doWithRetry(req, c.httpClient)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"request failed: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\n\tbody, err := io.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to read response: %w\", err)\n\t}\n\n\t// Check for errors\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, fmt.Errorf(\"ClickHouse error: %s\", string(body))\n\t}\n\n\t// For write queries or queries with explicit format, we may get empty body\n\tif len(body) == 0 || (isWrite && !hasFormat) {\n\t\treturn &QueryResult{\n\t\t\tData: []map[string]interface{}{},\n\t\t\tMeta: []ColumnMeta{},\n\t\t\tRows: 0,\n\t\t}, nil\n\t}\n\n\t// Parse JSON response\n\tvar result QueryResult\n\tif err := json.Unmarshal(body, &result); err != nil {\n\t\t// If JSON parse fails but status was OK, treat as DDL success\n\t\tif isWrite {\n\t\t\treturn &QueryResult{\n\t\t\t\tData: []map[string]interface{}{},\n\t\t\t\tMeta: []ColumnMeta{},\n\t\t\t\tRows: 0,\n\t\t\t}, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"failed to parse response: %w (body: %s)\", err, truncate(string(body), 200))\n\t}\n\n\treturn &result, nil\n}\n\n// ExecuteRaw runs a query and returns the raw ClickHouse response bytes without intermediate parsing.\n// The format parameter controls the FORMAT clause appended to read queries (e.g. 
\"JSONCompact\").\nfunc (c *CHClient) ExecuteRaw(ctx context.Context, query, user, password, format string) (json.RawMessage, error) {\n\tisWrite := isWriteQuery(query)\n\thasFormat := hasFormatClause(query)\n\n\tfinalQuery := query\n\tif !isWrite && !hasFormat {\n\t\tif format == \"\" {\n\t\t\tformat = \"JSON\"\n\t\t}\n\t\tfinalQuery = strings.TrimRight(query, \"; \\n\\t\") + \" FORMAT \" + format\n\t}\n\n\tparams := url.Values{}\n\tparams.Set(\"default_format\", \"JSON\")\n\tfullURL := c.baseURL + \"/?\" + params.Encode()\n\n\treq, err := http.NewRequestWithContext(ctx, \"POST\", fullURL, strings.NewReader(finalQuery))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to create request: %w\", err)\n\t}\n\tif user != \"\" {\n\t\treq.SetBasicAuth(user, password)\n\t}\n\treq.Header.Set(\"Content-Type\", \"text/plain\")\n\n\t// GetBody allows doWithRetry to re-create the body on retry\n\tbodyStr := finalQuery\n\treq.GetBody = func() (io.ReadCloser, error) {\n\t\treturn io.NopCloser(strings.NewReader(bodyStr)), nil\n\t}\n\n\tresp, err := c.doWithRetry(req, c.httpClient)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"request failed: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\n\tbody, err := io.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to read response: %w\", err)\n\t}\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, fmt.Errorf(\"ClickHouse error: %s\", string(body))\n\t}\n\n\tif len(body) == 0 || (isWrite && !hasFormat) {\n\t\treturn json.RawMessage(`{\"data\":[],\"meta\":[],\"rows\":0}`), nil\n\t}\n\n\treturn json.RawMessage(body), nil\n}\n\n// StreamChunk holds a batch of rows for streaming execution.\ntype StreamChunk struct {\n\tSeq  int               `json:\"seq\"`\n\tData json.RawMessage   `json:\"data\"` // JSON array of arrays: [[v1,v2],[v3,v4],...]\n}\n\n// ExecuteStreaming runs a query using JSONCompactEachRow format, reading the response\n// line-by-line without buffering the entire result. 
It calls onMeta with column metadata,\n// then onChunk for each batch of chunkSize rows, and returns final statistics.\nfunc (c *CHClient) ExecuteStreaming(\n\tctx context.Context,\n\tquery, user, password string,\n\tchunkSize int,\n\tsettings map[string]string,\n\tonMeta func(meta json.RawMessage) error,\n\tonChunk func(seq int, data json.RawMessage) error,\n) (*json.RawMessage, int64, error) {\n\tisWrite := isWriteQuery(query)\n\thasFormat := hasFormatClause(query)\n\n\tif chunkSize <= 0 {\n\t\tchunkSize = 5000\n\t}\n\n\t// Get column metadata via a LIMIT 0 query with JSONCompact, or send empty meta for writes\n\tif !isWrite && !hasFormat {\n\t\ttrimmed := strings.TrimRight(query, \"; \\n\\t\")\n\t\tvar metaQuery string\n\t\tif limitRe := regexp.MustCompile(`(?i)\\bLIMIT\\s+\\d+(\\s*,\\s*\\d+)?(\\s+OFFSET\\s+\\d+)?`); limitRe.MatchString(trimmed) {\n\t\t\tmetaQuery = limitRe.ReplaceAllString(trimmed, \"LIMIT 0\")\n\t\t} else {\n\t\t\t// Use a newline so that trailing -- line comments don't swallow the injected clause\n\t\t\tmetaQuery = trimmed + \"\\nLIMIT 0\"\n\t\t}\n\t\tmetaResult, err := c.ExecuteRaw(ctx, metaQuery, user, password, \"JSONCompact\")\n\t\tif err != nil {\n\t\t\treturn nil, 0, fmt.Errorf(\"metadata query failed: %w\", err)\n\t\t}\n\t\tvar compact struct {\n\t\t\tMeta json.RawMessage `json:\"meta\"`\n\t\t}\n\t\tif err := json.Unmarshal(metaResult, &compact); err == nil && len(compact.Meta) > 0 {\n\t\t\tif err := onMeta(compact.Meta); err != nil {\n\t\t\t\treturn nil, 0, err\n\t\t\t}\n\t\t}\n\t} else {\n\t\t// Write queries: send empty meta so consumers always get exactly one meta message\n\t\tif err := onMeta(json.RawMessage(\"[]\")); err != nil {\n\t\t\treturn nil, 0, err\n\t\t}\n\t}\n\n\t// Now execute the actual query with JSONCompactEachRow for streaming\n\tfinalQuery := query\n\tif !isWrite && !hasFormat {\n\t\t// Use a newline so trailing -- line comments don't swallow the FORMAT clause\n\t\tfinalQuery = strings.TrimRight(query, \"; 
\\n\\t\") + \"\\nFORMAT JSONCompactEachRow\"\n\t}\n\n\t// Extract max_result_rows for precise client-side enforcement in the scanner loop.\n\tvar maxRows int64\n\tif v, ok := settings[\"max_result_rows\"]; ok {\n\t\tif n, err := strconv.ParseInt(v, 10, 64); err == nil && n > 0 {\n\t\t\tmaxRows = n\n\t\t}\n\t}\n\n\tparams := url.Values{}\n\tparams.Set(\"default_format\", \"JSON\")\n\tparams.Set(\"send_progress_in_http_headers\", \"0\")\n\t// Pass settings as ClickHouse HTTP URL params for coarse server-side abort.\n\t// max_result_rows + result_overflow_mode=break causes ClickHouse to stop at block\n\t// granularity (~65k rows), preventing the server from doing unbounded work.\n\t// The scanner loop below enforces the exact row count on top of this.\n\tfor k, v := range settings {\n\t\tparams.Set(k, v)\n\t}\n\tfullURL := c.baseURL + \"/?\" + params.Encode()\n\n\treq, err := http.NewRequestWithContext(ctx, \"POST\", fullURL, strings.NewReader(finalQuery))\n\tif err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"failed to create request: %w\", err)\n\t}\n\tif user != \"\" {\n\t\treq.SetBasicAuth(user, password)\n\t}\n\treq.Header.Set(\"Content-Type\", \"text/plain\")\n\n\t// Use a client without timeout for streaming (context controls cancellation)\n\t// but share the configured transport for proper TLS and connection management.\n\tstreamClient := &http.Client{Transport: c.transport}\n\tresp, err := streamClient.Do(req)\n\tif err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"request failed: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\tbody, _ := io.ReadAll(io.LimitReader(resp.Body, 4096))\n\t\treturn nil, 0, fmt.Errorf(\"ClickHouse error: %s\", string(body))\n\t}\n\n\t// Read line by line, accumulate chunks\n\tscanner := bufio.NewScanner(resp.Body)\n\tscanner.Buffer(make([]byte, 0, 1024*1024), 10*1024*1024) // 10MB max line\n\n\tvar batch []json.RawMessage\n\tseq := 0\n\tvar totalRows int64\n\n\tfor scanner.Scan() {\n\t\tselect 
{\n\t\tcase <-ctx.Done():\n\t\t\treturn nil, totalRows, ctx.Err()\n\t\tdefault:\n\t\t}\n\n\t\tline := scanner.Bytes()\n\t\tif len(line) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Each line is a JSON array: [v1, v2, v3]\n\t\trow := make(json.RawMessage, len(line))\n\t\tcopy(row, line)\n\t\tbatch = append(batch, row)\n\t\ttotalRows++\n\n\t\t// Enforce max_result_rows limit: break early (closes body, aborts ClickHouse query)\n\t\tif maxRows > 0 && totalRows >= maxRows {\n\t\t\tbreak\n\t\t}\n\n\t\tif len(batch) >= chunkSize {\n\t\t\tchunkData, _ := json.Marshal(batch)\n\t\t\tif err := onChunk(seq, chunkData); err != nil {\n\t\t\t\treturn nil, totalRows, err\n\t\t\t}\n\t\t\tbatch = batch[:0]\n\t\t\tseq++\n\t\t}\n\t}\n\n\tif err := scanner.Err(); err != nil {\n\t\treturn nil, totalRows, fmt.Errorf(\"stream read error: %w\", err)\n\t}\n\n\t// Flush remaining rows\n\tif len(batch) > 0 {\n\t\tchunkData, _ := json.Marshal(batch)\n\t\tif err := onChunk(seq, chunkData); err != nil {\n\t\t\treturn nil, totalRows, err\n\t\t}\n\t}\n\n\t// We don't get statistics from JSONCompactEachRow format directly.\n\t// Return nil stats — the server can compute elapsed time itself.\n\treturn nil, totalRows, nil\n}\n\n// TestConnection verifies connectivity and returns the ClickHouse version\nfunc (c *CHClient) TestConnection(ctx context.Context, user, password string) (string, error) {\n\tquery := \"SELECT version() as version FORMAT JSON\"\n\n\tresult, err := c.Execute(ctx, query, user, password)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif len(result.Data) > 0 {\n\t\tif v, ok := result.Data[0][\"version\"]; ok {\n\t\t\treturn fmt.Sprintf(\"%v\", v), nil\n\t\t}\n\t}\n\n\treturn \"unknown\", nil\n}\n\n// Query patterns\nvar (\n\twriteQueryPattern = regexp.MustCompile(`(?i)^\\s*(INSERT|CREATE|DROP|ALTER|TRUNCATE|RENAME|ATTACH|DETACH|OPTIMIZE|GRANT|REVOKE|KILL|SYSTEM|SET|USE)`)\n\tformatPattern     = regexp.MustCompile(`(?i)\\bFORMAT\\s+\\w+\\s*$`)\n\tcommentPattern    = 
regexp.MustCompile(`(?m)^\\s*--.*$`)\n)\n\nfunc isWriteQuery(query string) bool {\n\t// Strip leading comments\n\tstripped := commentPattern.ReplaceAllString(query, \"\")\n\tstripped = strings.TrimSpace(stripped)\n\treturn writeQueryPattern.MatchString(stripped)\n}\n\nfunc hasFormatClause(query string) bool {\n\treturn formatPattern.MatchString(strings.TrimSpace(query))\n}\n\nfunc truncate(s string, maxLen int) string {\n\tif len(s) <= maxLen {\n\t\treturn s\n\t}\n\treturn s[:maxLen] + \"...\"\n}\n"
  },
  {
    "path": "connector/config/config.go",
    "content": "package config\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"runtime\"\n\t\"strings\"\n\t\"time\"\n\n\t\"gopkg.in/yaml.v3\"\n)\n\n// Config holds all agent configuration\ntype Config struct {\n\t// Required\n\tToken string `yaml:\"tunnel_token\"`\n\n\t// URLs\n\tClickHouseURL string `yaml:\"clickhouse_url\"`\n\tTunnelURL     string `yaml:\"tunnel_url\"`\n\n\t// Timing\n\tReconnectDelay     time.Duration `yaml:\"reconnect_delay\"`\n\tMaxReconnectDelay  time.Duration `yaml:\"max_reconnect_delay\"`\n\tHeartbeatInterval  time.Duration `yaml:\"heartbeat_interval\"`\n\tInsecureSkipVerify bool          `yaml:\"insecure_skip_verify\"`\n\n\t// Output control\n\tVerbose bool `yaml:\"-\"`\n\tQuiet   bool `yaml:\"-\"`\n\tNoColor bool `yaml:\"-\"`\n\tJSON    bool `yaml:\"-\"`\n\t// Connect behavior\n\tTakeover bool `yaml:\"-\"`\n}\n\n// Default configuration values\nvar Defaults = Config{\n\tClickHouseURL:      \"http://localhost:8123\",\n\tTunnelURL:          \"ws://127.0.0.1:3488/connect\",\n\tReconnectDelay:     1 * time.Second,\n\tMaxReconnectDelay:  30 * time.Second,\n\tHeartbeatInterval:  30 * time.Second,\n\tInsecureSkipVerify: false,\n}\n\n// configFile is the YAML structure for config file\ntype configFile struct {\n\tTunnelToken        string `yaml:\"tunnel_token\"`\n\tClickHouseURL      string `yaml:\"clickhouse_url\"`\n\tTunnelURL          string `yaml:\"tunnel_url\"`\n\tInsecureSkipVerify bool   `yaml:\"insecure_skip_verify\"`\n}\n\n// DefaultConfigPath returns the platform-specific default config path\nfunc DefaultConfigPath() string {\n\tswitch runtime.GOOS {\n\tcase \"darwin\":\n\t\thome, _ := os.UserHomeDir()\n\t\treturn filepath.Join(home, \".config\", \"ch-ui\", \"config.yaml\")\n\tdefault: // linux and others\n\t\treturn \"/etc/ch-ui/config.yaml\"\n\t}\n}\n\n// Load creates a Config by merging: CLI flags -> config file -> environment variables\n// Priority: CLI flags override config file, config file overrides env vars\nfunc 
Load(configPath string, cliConfig *Config) (*Config, error) {\n\tcfg := Defaults\n\n\t// 1. Load from config file (lowest priority after defaults)\n\tif configPath != \"\" {\n\t\tif err := loadFromFile(configPath, &cfg); err != nil {\n\t\t\t// Only error if file was explicitly specified and doesn't exist\n\t\t\tif !os.IsNotExist(err) {\n\t\t\t\treturn nil, fmt.Errorf(\"failed to load config file: %w\", err)\n\t\t\t}\n\t\t}\n\t} else {\n\t\t// Try default path, ignore if not exists\n\t\t_ = loadFromFile(DefaultConfigPath(), &cfg)\n\t}\n\n\t// 2. Override with environment variables\n\tloadFromEnv(&cfg)\n\n\t// 3. Override with CLI flags (highest priority)\n\tif cliConfig != nil {\n\t\tmergeConfig(&cfg, cliConfig)\n\t}\n\n\t// Validate\n\tif err := cfg.Validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &cfg, nil\n}\n\nfunc loadFromFile(path string, cfg *Config) error {\n\tdata, err := os.ReadFile(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar fc configFile\n\tif err := yaml.Unmarshal(data, &fc); err != nil {\n\t\treturn fmt.Errorf(\"invalid YAML: %w\", err)\n\t}\n\n\tif fc.TunnelToken != \"\" {\n\t\tcfg.Token = fc.TunnelToken\n\t}\n\tif fc.ClickHouseURL != \"\" {\n\t\tcfg.ClickHouseURL = fc.ClickHouseURL\n\t}\n\tif fc.TunnelURL != \"\" {\n\t\tcfg.TunnelURL = fc.TunnelURL\n\t}\n\tcfg.InsecureSkipVerify = fc.InsecureSkipVerify\n\n\treturn nil\n}\n\nfunc loadFromEnv(cfg *Config) {\n\tif v := os.Getenv(\"TUNNEL_TOKEN\"); v != \"\" {\n\t\tcfg.Token = v\n\t}\n\tif v := os.Getenv(\"CLICKHOUSE_URL\"); v != \"\" {\n\t\tcfg.ClickHouseURL = v\n\t}\n\tif v := os.Getenv(\"TUNNEL_URL\"); v != \"\" {\n\t\tcfg.TunnelURL = v\n\t}\n\tif v := os.Getenv(\"TUNNEL_INSECURE_SKIP_VERIFY\"); v == \"1\" || strings.EqualFold(v, \"true\") || strings.EqualFold(v, \"yes\") {\n\t\tcfg.InsecureSkipVerify = true\n\t}\n}\n\nfunc mergeConfig(dst, src *Config) {\n\tif src.Token != \"\" {\n\t\tdst.Token = src.Token\n\t}\n\tif src.ClickHouseURL != \"\" && src.ClickHouseURL != 
Defaults.ClickHouseURL {\n\t\tdst.ClickHouseURL = src.ClickHouseURL\n\t}\n\tif src.TunnelURL != \"\" && src.TunnelURL != Defaults.TunnelURL {\n\t\tdst.TunnelURL = src.TunnelURL\n\t}\n\tif src.ReconnectDelay != 0 && src.ReconnectDelay != Defaults.ReconnectDelay {\n\t\tdst.ReconnectDelay = src.ReconnectDelay\n\t}\n\tif src.MaxReconnectDelay != 0 && src.MaxReconnectDelay != Defaults.MaxReconnectDelay {\n\t\tdst.MaxReconnectDelay = src.MaxReconnectDelay\n\t}\n\tif src.HeartbeatInterval != 0 && src.HeartbeatInterval != Defaults.HeartbeatInterval {\n\t\tdst.HeartbeatInterval = src.HeartbeatInterval\n\t}\n\tdst.Verbose = src.Verbose\n\tdst.Quiet = src.Quiet\n\tdst.NoColor = src.NoColor\n\tdst.JSON = src.JSON\n\tdst.Takeover = src.Takeover\n\tif src.InsecureSkipVerify {\n\t\tdst.InsecureSkipVerify = true\n\t}\n}\n\n// Validate checks if the configuration is valid\nfunc (c *Config) Validate() error {\n\tif c.Token == \"\" {\n\t\treturn fmt.Errorf(\"tunnel token is required (use --key, TUNNEL_TOKEN env, or config file)\")\n\t}\n\n\tif !strings.HasPrefix(c.Token, \"cht_\") {\n\t\treturn fmt.Errorf(\"invalid tunnel token format (should start with 'cht_')\")\n\t}\n\n\tif !strings.HasPrefix(c.TunnelURL, \"ws://\") && !strings.HasPrefix(c.TunnelURL, \"wss://\") {\n\t\treturn fmt.Errorf(\"tunnel URL must start with ws:// or wss://\")\n\t}\n\n\tif !strings.HasPrefix(c.ClickHouseURL, \"http://\") && !strings.HasPrefix(c.ClickHouseURL, \"https://\") {\n\t\treturn fmt.Errorf(\"ClickHouse URL must start with http:// or https://\")\n\t}\n\n\treturn nil\n}\n\n// GenerateTemplate returns a YAML config template\nfunc GenerateTemplate() string {\n\treturn `# CH-UI Agent Configuration\n#\n# This file can be placed at:\n#   - Linux: /etc/ch-ui/config.yaml\n#   - macOS: ~/.config/ch-ui/config.yaml\n#\n# All settings can also be specified via environment variables or CLI flags.\n# Priority: CLI flags > Environment variables > Config file\n\n# Required: Your tunnel token from CH-UI server (ch-ui 
tunnel create --name <name>)\ntunnel_token: \"cht_your_token_here\"\n\n# ClickHouse HTTP API URL (default: http://localhost:8123)\nclickhouse_url: \"http://localhost:8123\"\n\n# CH-UI tunnel URL (default: ws://127.0.0.1:3488/connect)\ntunnel_url: \"ws://127.0.0.1:3488/connect\"\n\n# Skip TLS certificate validation for tunnel connection (unsafe, dev only)\n# insecure_skip_verify: false\n`\n}\n\n// Redacted returns a copy of the config with sensitive fields redacted\nfunc (c *Config) Redacted() Config {\n\tredacted := *c\n\tif redacted.Token != \"\" {\n\t\tif len(redacted.Token) > 8 {\n\t\t\tredacted.Token = redacted.Token[:8] + \"...\"\n\t\t} else {\n\t\t\tredacted.Token = \"***\"\n\t\t}\n\t}\n\treturn redacted\n}\n"
  },
  {
    "path": "connector/connector.go",
    "content": "package connector\n\nimport (\n\t\"context\"\n\t\"crypto/tls\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n\t\"sync\"\n\t\"sync/atomic\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/connector/config\"\n\t\"github.com/caioricciuti/ch-ui/connector/ui\"\n\t\"github.com/gorilla/websocket\"\n)\n\n// Connector manages the tunnel connection to a CH-UI server\ntype Connector struct {\n\tcfg      *config.Config\n\tui       *ui.UI\n\tchClient *CHClient\n\n\tconn          *websocket.Conn\n\tconnMu        sync.Mutex\n\tauthenticated bool\n\tstartTime     time.Time\n\n\t// Stats\n\tqueriesExecuted atomic.Int64\n\tlastQueryTime   atomic.Int64\n\n\t// Control\n\tctx    context.Context\n\tcancel context.CancelFunc\n\tdone   chan struct{}\n\n\t// Reconnection\n\treconnectDelay time.Duration\n}\n\n// New creates a new Connector instance\nfunc New(cfg *config.Config, u *ui.UI) *Connector {\n\tctx, cancel := context.WithCancel(context.Background())\n\n\treturn &Connector{\n\t\tcfg:            cfg,\n\t\tui:             u,\n\t\tchClient:       NewCHClient(cfg.ClickHouseURL, cfg.InsecureSkipVerify),\n\t\treconnectDelay: cfg.ReconnectDelay,\n\t\tctx:            ctx,\n\t\tcancel:         cancel,\n\t\tdone:           make(chan struct{}),\n\t}\n}\n\n// Run starts the connector and blocks until shutdown\nfunc (c *Connector) Run() error {\n\tc.startTime = time.Now()\n\n\t// Initial connection\n\tif err := c.connect(); err != nil {\n\t\tif ce, ok := err.(*ConnectError); ok && ce.Type == \"auth\" {\n\t\t\tc.ui.Error(\"Authentication failed — not retrying (token may be invalid or revoked)\")\n\t\t\treturn err\n\t\t}\n\t\treturn err\n\t}\n\n\t// Start message handler\n\tgo c.messageLoop()\n\n\t// Start heartbeat\n\tgo c.heartbeatLoop()\n\n\t// Start host info reporting\n\tgo c.hostInfoLoop()\n\n\t// Wait for shutdown\n\t<-c.done\n\treturn nil\n}\n\n// Shutdown gracefully stops the connector\nfunc (c *Connector) Shutdown() 
{\n\tc.cancel()\n\tc.connMu.Lock()\n\tif c.conn != nil {\n\t\tc.conn.WriteMessage(websocket.CloseMessage,\n\t\t\twebsocket.FormatCloseMessage(websocket.CloseNormalClosure, \"shutdown\"))\n\t\tc.conn.Close()\n\t}\n\tc.connMu.Unlock()\n\tclose(c.done)\n}\n\n// Stats returns current connector statistics\nfunc (c *Connector) Stats() (queriesExecuted int64, uptime time.Duration, lastQuery time.Time) {\n\tqueriesExecuted = c.queriesExecuted.Load()\n\tuptime = time.Since(c.startTime)\n\tif ts := c.lastQueryTime.Load(); ts > 0 {\n\t\tlastQuery = time.Unix(0, ts)\n\t}\n\treturn\n}\n\n// ConnectError represents a classified connection error\ntype ConnectError struct {\n\tType    string // \"network\", \"auth\", \"server\", \"protocol\"\n\tMessage string\n\tErr     error\n}\n\nfunc (e *ConnectError) Error() string {\n\treturn e.Message\n}\n\nfunc (c *Connector) connect() error {\n\tc.ui.Info(\"Connecting to %s...\", extractHost(c.cfg.TunnelURL))\n\n\tdialer := websocket.Dialer{\n\t\tHandshakeTimeout: 10 * time.Second,\n\t\tTLSClientConfig:  &tls.Config{InsecureSkipVerify: c.cfg.InsecureSkipVerify},\n\t}\n\n\tif c.cfg.InsecureSkipVerify {\n\t\tc.ui.Warn(\"TLS certificate verification is disabled (insecure_skip_verify=true)\")\n\t}\n\n\theaders := http.Header{}\n\theaders.Set(\"User-Agent\", \"ch-ui-agent/1.0\")\n\n\tconn, dialResp, err := dialer.DialContext(c.ctx, c.cfg.TunnelURL, headers)\n\tif err != nil {\n\t\tdialErr := err\n\t\tif dialResp != nil {\n\t\t\tbody, _ := io.ReadAll(io.LimitReader(dialResp.Body, 2048))\n\t\t\tdialResp.Body.Close()\n\t\t\tif len(body) > 0 {\n\t\t\t\tdialErr = fmt.Errorf(\"%w (status=%d body=%q)\", err, dialResp.StatusCode, strings.TrimSpace(string(body)))\n\t\t\t} else {\n\t\t\t\tdialErr = fmt.Errorf(\"%w (status=%d)\", err, dialResp.StatusCode)\n\t\t\t}\n\t\t}\n\n\t\tc.ui.ConnectionError(dialErr, c.cfg.TunnelURL)\n\t\treturn &ConnectError{Type: \"network\", Message: \"Failed to connect to CH-UI server\", Err: 
dialErr}\n\t}\n\n\tc.connMu.Lock()\n\tc.conn = conn\n\tc.connMu.Unlock()\n\n\t// Send auth message\n\tauthMsg := AgentMessage{\n\t\tType:     MsgTypeAuth,\n\t\tToken:    c.cfg.Token,\n\t\tTakeover: c.cfg.Takeover,\n\t}\n\n\tif err := c.send(authMsg); err != nil {\n\t\tconn.Close()\n\t\tc.ui.ConnectionError(err, c.cfg.TunnelURL)\n\t\treturn &ConnectError{Type: \"network\", Message: \"Failed to send authentication\", Err: err}\n\t}\n\n\tc.ui.Debug(\"Auth message sent, waiting for response...\")\n\n\t// Wait for auth response\n\tconn.SetReadDeadline(time.Now().Add(10 * time.Second))\n\t_, message, err := conn.ReadMessage()\n\tif err != nil {\n\t\tconn.Close()\n\t\tc.ui.ConnectionError(err, c.cfg.TunnelURL)\n\t\treturn &ConnectError{Type: \"network\", Message: \"Failed to receive auth response\", Err: err}\n\t}\n\tconn.SetReadDeadline(time.Time{}) // Clear deadline\n\n\tvar authResp GatewayMessage\n\tif err := json.Unmarshal(message, &authResp); err != nil {\n\t\tconn.Close()\n\t\tc.ui.DiagnosticError(ui.ErrorTypeServer, \"CH-UI Server\",\n\t\t\t\"Received invalid response from server\",\n\t\t\t[]string{\n\t\t\t\t\"The server may be running an incompatible version\",\n\t\t\t\t\"Try updating the agent to the latest version\",\n\t\t\t\t\"Contact support if the issue persists\",\n\t\t\t})\n\t\treturn &ConnectError{Type: \"protocol\", Message: \"Invalid server response\", Err: err}\n\t}\n\n\tswitch authResp.Type {\n\tcase MsgTypeAuthOK:\n\t\tc.authenticated = true\n\t\tc.reconnectDelay = c.cfg.ReconnectDelay // Reset on successful connection\n\t\tc.ui.Success(\"Authenticated successfully\")\n\t\tc.ui.Success(\"Tunnel established\")\n\t\tc.ui.Status(c.cfg.TunnelURL, c.cfg.ClickHouseURL, time.Since(c.startTime))\n\t\treturn nil\n\n\tcase MsgTypeAuthError:\n\t\tconn.Close()\n\t\t// Server may send error in either \"error\" or \"message\" field\n\t\terrMsg := authResp.Error\n\t\tif errMsg == \"\" {\n\t\t\terrMsg = authResp.Message\n\t\t}\n\t\tif errMsg == \"\" {\n\t\t\terrMsg 
= \"Authentication failed (no details provided)\"\n\t\t}\n\t\tif isPermanentAuthError(errMsg) {\n\t\t\tc.ui.AuthError(errMsg)\n\t\t\treturn &ConnectError{Type: \"auth\", Message: errMsg}\n\t\t}\n\n\t\tc.ui.Warn(\"Server temporarily rejected authentication: %s\", errMsg)\n\t\treturn &ConnectError{Type: \"server\", Message: errMsg}\n\n\tdefault:\n\t\tconn.Close()\n\t\tc.ui.DiagnosticError(ui.ErrorTypeServer, \"CH-UI Server\",\n\t\t\tfmt.Sprintf(\"Unexpected response type: %s\", authResp.Type),\n\t\t\t[]string{\n\t\t\t\t\"The server may be running an incompatible version\",\n\t\t\t\t\"Try updating the agent to the latest version\",\n\t\t\t})\n\t\treturn &ConnectError{Type: \"protocol\", Message: fmt.Sprintf(\"Unexpected response: %s\", authResp.Type)}\n\t}\n}\n\nfunc isPermanentAuthError(msg string) bool {\n\tlower := strings.ToLower(strings.TrimSpace(msg))\n\tif lower == \"\" {\n\t\treturn false\n\t}\n\n\treturn strings.Contains(lower, \"invalid tunnel token\") ||\n\t\tstrings.Contains(lower, \"invalid token\") ||\n\t\tstrings.Contains(lower, \"revoked\")\n}\n\nfunc (c *Connector) messageLoop() {\n\tfor {\n\t\tselect {\n\t\tcase <-c.ctx.Done():\n\t\t\treturn\n\t\tdefault:\n\t\t}\n\n\t\tc.connMu.Lock()\n\t\tconn := c.conn\n\t\tc.connMu.Unlock()\n\n\t\tif conn == nil {\n\t\t\ttime.Sleep(100 * time.Millisecond)\n\t\t\tcontinue\n\t\t}\n\n\t\t_, message, err := conn.ReadMessage()\n\t\tif err != nil {\n\t\t\tif websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway) {\n\t\t\t\tc.ui.Disconnected(\"server closed connection\")\n\t\t\t} else {\n\t\t\t\tc.ui.Disconnected(err.Error())\n\t\t\t}\n\n\t\t\tc.connMu.Lock()\n\t\t\tc.conn = nil\n\t\t\tc.authenticated = false\n\t\t\tc.connMu.Unlock()\n\n\t\t\t// Attempt reconnection\n\t\t\tc.reconnect()\n\t\t\tcontinue\n\t\t}\n\n\t\tvar msg GatewayMessage\n\t\tif err := json.Unmarshal(message, &msg); err != nil {\n\t\t\tc.ui.Debug(\"Invalid message: %v\", 
err)\n\t\t\tcontinue\n\t\t}\n\n\t\tc.handleMessage(msg)\n\t}\n}\n\nfunc (c *Connector) handleMessage(msg GatewayMessage) {\n\tswitch msg.Type {\n\tcase MsgTypePing:\n\t\tc.send(AgentMessage{Type: MsgTypePong})\n\n\tcase MsgTypeQuery:\n\t\tgo c.executeQuery(msg)\n\n\tcase MsgTypeQueryStream:\n\t\tgo c.executeStreamQuery(msg)\n\n\tcase MsgTypeTestConnection:\n\t\tgo c.testConnection(msg)\n\n\tcase MsgTypeCancelQuery:\n\t\tc.ui.Debug(\"Cancel query requested for %s (not implemented)\", msg.QueryID)\n\n\tdefault:\n\t\tc.ui.Debug(\"Unknown message type: %s\", msg.Type)\n\t}\n}\n\nfunc (c *Connector) executeQuery(msg GatewayMessage) {\n\tstart := time.Now()\n\tqueryID := msg.QueryID\n\tsql := msg.Query\n\n\tformat := msg.Format // \"\" or \"JSON\" = legacy, \"JSONCompact\" = tier 1\n\n\t// If a compact format is requested, use ExecuteRaw to avoid intermediate parsing\n\tif format != \"\" && format != \"JSON\" {\n\t\traw, err := c.chClient.ExecuteRaw(c.ctx, sql, msg.User, msg.Password, format)\n\t\telapsed := time.Since(start)\n\n\t\tif err != nil {\n\t\t\tc.ui.QueryError(queryID, err)\n\t\t\tc.send(AgentMessage{\n\t\t\t\tType:    MsgTypeQueryError,\n\t\t\t\tQueryID: queryID,\n\t\t\t\tError:   err.Error(),\n\t\t\t})\n\t\t\treturn\n\t\t}\n\n\t\tc.queriesExecuted.Add(1)\n\t\tc.lastQueryTime.Store(time.Now().UnixNano())\n\t\tc.ui.QueryLog(queryID, elapsed, 0)\n\n\t\t// Send raw bytes directly — no intermediate parse/reserialize\n\t\tc.send(AgentMessage{\n\t\t\tType:    MsgTypeQueryResult,\n\t\t\tQueryID: queryID,\n\t\t\tData:    raw,\n\t\t})\n\t\treturn\n\t}\n\n\t// Legacy JSON path — parse into structured result\n\tresult, err := c.chClient.Execute(c.ctx, sql, msg.User, msg.Password)\n\telapsed := time.Since(start)\n\n\tif err != nil {\n\t\tc.ui.QueryError(queryID, err)\n\t\tc.send(AgentMessage{\n\t\t\tType:    MsgTypeQueryError,\n\t\t\tQueryID: queryID,\n\t\t\tError:   
err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\tc.queriesExecuted.Add(1)\n\tc.lastQueryTime.Store(time.Now().UnixNano())\n\n\trows := len(result.Data)\n\tc.ui.QueryLog(queryID, elapsed, rows)\n\n\tc.send(AgentMessage{\n\t\tType:    MsgTypeQueryResult,\n\t\tQueryID: queryID,\n\t\tData:    result.Data,\n\t\tMeta:    result.Meta,\n\t\tStats: &QueryStats{\n\t\t\tElapsed:   result.Statistics.Elapsed,\n\t\t\tRowsRead:  result.Statistics.RowsRead,\n\t\t\tBytesRead: result.Statistics.BytesRead,\n\t\t},\n\t})\n}\n\nfunc (c *Connector) executeStreamQuery(msg GatewayMessage) {\n\tstart := time.Now()\n\tqueryID := msg.QueryID\n\tsql := msg.Query\n\n\tc.ui.Debug(\"Stream query %s: %s\", queryID, truncateStr(sql, 80))\n\n\t// Send chunks as they arrive\n\tonMeta := func(meta json.RawMessage) error {\n\t\treturn c.send(AgentMessage{\n\t\t\tType:    MsgTypeQueryStreamStart,\n\t\t\tQueryID: queryID,\n\t\t\tMeta:    meta,\n\t\t})\n\t}\n\n\tonChunk := func(seq int, data json.RawMessage) error {\n\t\treturn c.send(AgentMessage{\n\t\t\tType:    MsgTypeQueryStreamChunk,\n\t\t\tQueryID: queryID,\n\t\t\tData:    data,\n\t\t\tSeq:     seq,\n\t\t})\n\t}\n\n\t_, totalRows, err := c.chClient.ExecuteStreaming(c.ctx, sql, msg.User, msg.Password, 5000, msg.Settings, onMeta, onChunk)\n\telapsed := time.Since(start)\n\n\tif err != nil {\n\t\tc.ui.QueryError(queryID, err)\n\t\tc.send(AgentMessage{\n\t\t\tType:    MsgTypeQueryStreamError,\n\t\t\tQueryID: queryID,\n\t\t\tError:   err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\tc.queriesExecuted.Add(1)\n\tc.lastQueryTime.Store(time.Now().UnixNano())\n\tc.ui.QueryLog(queryID, elapsed, int(totalRows))\n\n\tc.send(AgentMessage{\n\t\tType:      MsgTypeQueryStreamEnd,\n\t\tQueryID:   queryID,\n\t\tTotalRows: totalRows,\n\t\tStats: &QueryStats{\n\t\t\tElapsed: elapsed.Seconds(),\n\t\t},\n\t})\n}\n\nfunc truncateStr(s string, maxLen int) string {\n\tif len(s) <= maxLen {\n\t\treturn s\n\t}\n\treturn s[:maxLen] + \"...\"\n}\n\nfunc (c *Connector) testConnection(msg 
GatewayMessage) {\n\tversion, err := c.chClient.TestConnection(c.ctx, msg.User, msg.Password)\n\n\tif err != nil {\n\t\tc.ui.Debug(\"Connection test failed: %v\", err)\n\t\tc.send(AgentMessage{\n\t\t\tType:    MsgTypeTestResult,\n\t\t\tQueryID: msg.QueryID,\n\t\t\tOnline:  false,\n\t\t\tError:   err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\tc.ui.Debug(\"Connection test successful, version: %s\", version)\n\tc.send(AgentMessage{\n\t\tType:    MsgTypeTestResult,\n\t\tQueryID: msg.QueryID,\n\t\tOnline:  true,\n\t\tVersion: version,\n\t})\n}\n\nfunc (c *Connector) heartbeatLoop() {\n\tticker := time.NewTicker(c.cfg.HeartbeatInterval)\n\tdefer ticker.Stop()\n\n\tfor {\n\t\tselect {\n\t\tcase <-c.ctx.Done():\n\t\t\treturn\n\t\tcase <-ticker.C:\n\t\t\tc.connMu.Lock()\n\t\t\tconn := c.conn\n\t\t\tauthenticated := c.authenticated\n\t\t\tif conn != nil && authenticated {\n\t\t\t\tif err := conn.WriteControl(websocket.PingMessage, nil, time.Now().Add(5*time.Second)); err != nil {\n\t\t\t\t\tc.ui.Debug(\"Heartbeat failed: %v\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tc.connMu.Unlock()\n\t\t}\n\t}\n}\n\n// sendHostInfo collects and sends host machine metrics to the server\nfunc (c *Connector) sendHostInfo() {\n\thostInfo := CollectHostInfo(c.startTime)\n\n\tif err := c.send(AgentMessage{\n\t\tType:     MsgTypeHostInfo,\n\t\tHostInfo: hostInfo,\n\t}); err != nil {\n\t\tc.ui.Debug(\"Failed to send host info: %v\", err)\n\t} else {\n\t\tc.ui.Debug(\"Host info sent (CPU: %d cores, Mem: %d MB, Disk: %d GB)\",\n\t\t\thostInfo.CPUCores,\n\t\t\thostInfo.MemoryTotal/(1024*1024),\n\t\t\thostInfo.DiskTotal/(1024*1024*1024))\n\t}\n}\n\n// hostInfoLoop sends host info periodically (every 60 seconds)\nfunc (c *Connector) hostInfoLoop() {\n\t// Send initial host info after a short delay to allow auth to complete\n\ttime.Sleep(2 * time.Second)\n\tc.sendHostInfo()\n\n\tticker := time.NewTicker(60 * time.Second)\n\tdefer ticker.Stop()\n\n\tfor {\n\t\tselect {\n\t\tcase 
<-c.ctx.Done():\n\t\t\treturn\n\t\tcase <-ticker.C:\n\t\t\tc.connMu.Lock()\n\t\t\tauthenticated := c.authenticated\n\t\t\tc.connMu.Unlock()\n\n\t\t\tif authenticated {\n\t\t\t\tc.sendHostInfo()\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc (c *Connector) reconnect() {\n\tfor {\n\t\tselect {\n\t\tcase <-c.ctx.Done():\n\t\t\treturn\n\t\tdefault:\n\t\t}\n\n\t\tc.ui.Reconnecting(c.reconnectDelay)\n\t\ttime.Sleep(c.reconnectDelay)\n\n\t\t// Exponential backoff\n\t\tc.reconnectDelay *= 2\n\t\tif c.reconnectDelay > c.cfg.MaxReconnectDelay {\n\t\t\tc.reconnectDelay = c.cfg.MaxReconnectDelay\n\t\t}\n\n\t\tif err := c.connect(); err != nil {\n\t\t\tif ce, ok := err.(*ConnectError); ok && ce.Type == \"auth\" {\n\t\t\t\tc.ui.Error(\"Authentication failed — stopping reconnection (token is invalid or revoked)\")\n\t\t\t\tclose(c.done)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tc.ui.Error(\"Reconnection failed: %v\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\treturn\n\t}\n}\n\nfunc (c *Connector) send(msg AgentMessage) error {\n\tc.connMu.Lock()\n\tdefer c.connMu.Unlock()\n\n\tif c.conn == nil {\n\t\treturn fmt.Errorf(\"not connected\")\n\t}\n\n\tdata, err := json.Marshal(msg)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn c.conn.WriteMessage(websocket.TextMessage, data)\n}\n\nfunc extractHost(urlStr string) string {\n\tparsed, err := url.Parse(urlStr)\n\tif err == nil && parsed.Host != \"\" {\n\t\tif hostname := parsed.Hostname(); hostname != \"\" {\n\t\t\treturn hostname\n\t\t}\n\t\treturn parsed.Host\n\t}\n\n\ttrimmed := strings.TrimPrefix(strings.TrimPrefix(urlStr, \"wss://\"), \"ws://\")\n\tfor i, c := range trimmed {\n\t\tif c == '/' || c == ':' {\n\t\t\treturn trimmed[:i]\n\t\t}\n\t}\n\treturn trimmed\n}\n"
  },
  {
    "path": "connector/hostinfo.go",
    "content": "package connector\n\nimport (\n\t\"os\"\n\t\"runtime\"\n\t\"time\"\n)\n\n// HostInfo contains system metrics from the host machine\ntype HostInfo struct {\n\tHostname    string `json:\"hostname\"`\n\tOS          string `json:\"os\"`\n\tArch        string `json:\"arch\"`\n\tCPUCores    int    `json:\"cpu_cores\"`\n\tMemoryTotal uint64 `json:\"memory_total\"` // bytes\n\tMemoryFree  uint64 `json:\"memory_free\"`  // bytes\n\tDiskTotal   uint64 `json:\"disk_total\"`   // bytes\n\tDiskFree    uint64 `json:\"disk_free\"`    // bytes\n\tGoVersion   string `json:\"go_version\"`\n\tAgentUptime int64  `json:\"agent_uptime\"` // seconds\n\tCollectedAt string `json:\"collected_at\"` // ISO 8601\n}\n\n// CollectHostInfo gathers system metrics from the host machine\nfunc CollectHostInfo(agentStartTime time.Time) *HostInfo {\n\tinfo := &HostInfo{\n\t\tOS:          runtime.GOOS,\n\t\tArch:        runtime.GOARCH,\n\t\tCPUCores:    runtime.NumCPU(),\n\t\tGoVersion:   runtime.Version(),\n\t\tAgentUptime: int64(time.Since(agentStartTime).Seconds()),\n\t\tCollectedAt: time.Now().UTC().Format(time.RFC3339),\n\t}\n\n\t// Hostname\n\tif hostname, err := os.Hostname(); err == nil {\n\t\tinfo.Hostname = hostname\n\t} else {\n\t\tinfo.Hostname = \"unknown\"\n\t}\n\n\t// Memory stats (use Go runtime as cross-platform source)\n\tinfo.MemoryTotal, info.MemoryFree = getMemoryInfo()\n\n\t// Disk stats for root filesystem (platform-specific)\n\tinfo.DiskTotal, info.DiskFree = getDiskInfo()\n\n\treturn info\n}\n\n// getMemoryInfo returns total and free memory in bytes\n// Uses runtime.MemStats as a cross-platform approach\nfunc getMemoryInfo() (total, free uint64) {\n\tvar m runtime.MemStats\n\truntime.ReadMemStats(&m)\n\n\t// Use runtime stats as cross-platform source\n\t// Sys is total memory obtained from OS\n\t// Alloc is memory currently in use\n\ttotal = m.Sys\n\tfree = m.Sys - m.Alloc\n\treturn\n}\n"
  },
  {
    "path": "connector/hostinfo_unix.go",
    "content": "//go:build !windows\n\npackage connector\n\nimport \"syscall\"\n\n// getDiskInfo returns total and free disk space for the root filesystem\nfunc getDiskInfo() (total, free uint64) {\n\tvar stat syscall.Statfs_t\n\tif err := syscall.Statfs(\"/\", &stat); err != nil {\n\t\treturn 0, 0\n\t}\n\n\ttotal = stat.Blocks * uint64(stat.Bsize)\n\tfree = stat.Bfree * uint64(stat.Bsize)\n\treturn\n}\n"
  },
  {
    "path": "connector/hostinfo_windows.go",
    "content": "//go:build windows\n\npackage connector\n\nimport (\n\t\"syscall\"\n\t\"unsafe\"\n)\n\n// getDiskInfo returns total and free disk space for the C: drive\nfunc getDiskInfo() (total, free uint64) {\n\tkernel32 := syscall.MustLoadDLL(\"kernel32.dll\")\n\tgetDiskFreeSpaceEx := kernel32.MustFindProc(\"GetDiskFreeSpaceExW\")\n\n\tvar freeBytesAvailable, totalNumberOfBytes, totalNumberOfFreeBytes uint64\n\n\tpath, _ := syscall.UTF16PtrFromString(\"C:\\\\\")\n\tr, _, _ := getDiskFreeSpaceEx.Call(\n\t\tuintptr(unsafe.Pointer(path)),\n\t\tuintptr(unsafe.Pointer(&freeBytesAvailable)),\n\t\tuintptr(unsafe.Pointer(&totalNumberOfBytes)),\n\t\tuintptr(unsafe.Pointer(&totalNumberOfFreeBytes)),\n\t)\n\n\tif r == 0 {\n\t\treturn 0, 0\n\t}\n\n\treturn totalNumberOfBytes, totalNumberOfFreeBytes\n}\n"
  },
  {
    "path": "connector/protocol.go",
    "content": "package connector\n\n// GatewayMessage represents messages received from the CH-UI tunnel server.\ntype GatewayMessage struct {\n\tType     string `json:\"type\"`               // Message type: auth_ok, auth_error, query, query_stream, ping, cancel_query, test_connection\n\tQueryID  string `json:\"query_id,omitempty\"` // Query identifier\n\tQuery    string `json:\"query,omitempty\"`    // SQL query to execute\n\tUser     string `json:\"user,omitempty\"`     // ClickHouse username for this query\n\tPassword string `json:\"password,omitempty\"` // ClickHouse password for this query\n\tFormat   string            `json:\"format,omitempty\"`   // ClickHouse output format (JSONCompact, stream, etc.)\n\tError    string            `json:\"error,omitempty\"`    // Error message (for auth_error)\n\tMessage  string            `json:\"message,omitempty\"`  // Additional message info\n\tSettings map[string]string `json:\"settings,omitempty\"` // ClickHouse query settings (URL params)\n}\n\n// AgentMessage represents messages sent to the CH-UI tunnel server.\ntype AgentMessage struct {\n\tType      string      `json:\"type\"`               // Message type: auth, pong, query_result, query_error, test_result, host_info, query_stream_*\n\tQueryID   string      `json:\"query_id,omitempty\"` // Query identifier (for query responses)\n\tToken     string      `json:\"token,omitempty\"`    // Tunnel token (for auth message)\n\tTakeover  bool        `json:\"takeover,omitempty\"` // Request takeover of an existing session for this token\n\tData      interface{} `json:\"data,omitempty\"`     // Query result data\n\tMeta      interface{} `json:\"meta,omitempty\"`     // Query result metadata\n\tStats     *QueryStats `json:\"statistics,omitempty\"`\n\tError     string      `json:\"error,omitempty\"`      // Error message\n\tVersion   string      `json:\"version,omitempty\"`    // ClickHouse version (for test_result)\n\tOnline    bool        `json:\"online,omitempty\"`     // 
Connection status (for test_result)\n\tHostInfo  *HostInfo   `json:\"host_info,omitempty\"`  // Host machine metrics\n\tSeq       int         `json:\"seq,omitempty\"`        // Chunk sequence number (for streaming)\n\tTotalRows int64       `json:\"total_rows,omitempty\"` // Total row count (for streaming)\n}\n\n// QueryStats contains query execution statistics\ntype QueryStats struct {\n\tElapsed   float64 `json:\"elapsed\"`\n\tRowsRead  uint64  `json:\"rows_read\"`\n\tBytesRead uint64  `json:\"bytes_read\"`\n}\n\n// Message types from gateway\nconst (\n\tMsgTypeAuthOK         = \"auth_ok\"\n\tMsgTypeAuthError      = \"auth_error\"\n\tMsgTypeQuery          = \"query\"\n\tMsgTypeQueryStream    = \"query_stream\"\n\tMsgTypePing           = \"ping\"\n\tMsgTypeCancelQuery    = \"cancel_query\"\n\tMsgTypeTestConnection = \"test_connection\"\n)\n\n// Message types to gateway\nconst (\n\tMsgTypeAuth             = \"auth\"\n\tMsgTypePong             = \"pong\"\n\tMsgTypeQueryResult      = \"query_result\"\n\tMsgTypeQueryError       = \"query_error\"\n\tMsgTypeTestResult       = \"test_result\"\n\tMsgTypeHostInfo         = \"host_info\"\n\tMsgTypeQueryStreamStart = \"query_stream_start\"\n\tMsgTypeQueryStreamChunk = \"query_stream_chunk\"\n\tMsgTypeQueryStreamEnd   = \"query_stream_end\"\n\tMsgTypeQueryStreamError = \"query_stream_error\"\n)\n"
  },
  {
    "path": "connector/service/launchd.go",
    "content": "package service\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"os/exec\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"strings\"\n)\n\nconst launchdPlistTemplate = `<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n    <key>Label</key>\n    <string>%s</string>\n    <key>ProgramArguments</key>\n    <array>\n        <string>%s</string>\n        <string>connect</string>\n        <string>--config</string>\n        <string>%s</string>\n    </array>\n    <key>RunAtLoad</key>\n    <true/>\n    <key>KeepAlive</key>\n    <dict>\n        <key>SuccessfulExit</key>\n        <false/>\n    </dict>\n    <key>ThrottleInterval</key>\n    <integer>5</integer>\n    <key>StandardOutPath</key>\n    <string>%s</string>\n    <key>StandardErrorPath</key>\n    <string>%s</string>\n    <key>WorkingDirectory</key>\n    <string>/tmp</string>\n</dict>\n</plist>\n`\n\nfunc (m *Manager) launchdPlistPath() string {\n\thome, _ := os.UserHomeDir()\n\treturn filepath.Join(home, \"Library\", \"LaunchAgents\", ServiceLabel+\".plist\")\n}\n\nfunc (m *Manager) launchdLogDir() string {\n\thome, _ := os.UserHomeDir()\n\treturn filepath.Join(home, \"Library\", \"Logs\", \"ch-ui\")\n}\n\nfunc (m *Manager) launchdLogPath() string {\n\treturn filepath.Join(m.launchdLogDir(), \"agent.log\")\n}\n\nfunc (m *Manager) launchdIsInstalled() bool {\n\treturn fileExists(m.launchdPlistPath())\n}\n\nfunc (m *Manager) launchdIsRunning() (bool, error) {\n\toutput, err := runCommand(\"launchctl\", \"list\")\n\tif err != nil {\n\t\treturn false, err\n\t}\n\treturn strings.Contains(output, ServiceLabel), nil\n}\n\nfunc (m *Manager) launchdInstall(configPath string) error {\n\t// Create log directory\n\tlogDir := m.launchdLogDir()\n\tif err := os.MkdirAll(logDir, 0755); err != nil {\n\t\treturn fmt.Errorf(\"failed to create log directory: %w\", err)\n\t}\n\n\t// Create LaunchAgents directory 
if it doesn't exist\n\tagentsDir := filepath.Dir(m.launchdPlistPath())\n\tif err := os.MkdirAll(agentsDir, 0755); err != nil {\n\t\treturn fmt.Errorf(\"failed to create LaunchAgents directory: %w\", err)\n\t}\n\n\t// Generate plist content\n\tlogPath := m.launchdLogPath()\n\tplistContent := fmt.Sprintf(launchdPlistTemplate,\n\t\tServiceLabel,\n\t\tBinaryPath,\n\t\tconfigPath,\n\t\tlogPath,\n\t\tlogPath,\n\t)\n\n\t// Write plist file\n\tif err := os.WriteFile(m.launchdPlistPath(), []byte(plistContent), 0644); err != nil {\n\t\treturn fmt.Errorf(\"failed to write plist file: %w\", err)\n\t}\n\n\t// Load the service\n\t_, err := runCommand(\"launchctl\", \"load\", m.launchdPlistPath())\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to load service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) launchdUninstall() error {\n\t// Stop the service first (ignore errors if not running)\n\t_ = m.launchdStop()\n\n\t// Unload the service\n\tif m.launchdIsInstalled() {\n\t\trunCommand(\"launchctl\", \"unload\", m.launchdPlistPath())\n\t}\n\n\t// Remove plist file\n\tplistPath := m.launchdPlistPath()\n\tif fileExists(plistPath) {\n\t\tif err := os.Remove(plistPath); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to remove plist file: %w\", err)\n\t\t}\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) launchdStart() error {\n\tif !m.launchdIsInstalled() {\n\t\treturn fmt.Errorf(\"service not installed. 
Run 'ch-ui service install' first\")\n\t}\n\n\t// Check if already running\n\trunning, _ := m.launchdIsRunning()\n\tif running {\n\t\treturn fmt.Errorf(\"service is already running\")\n\t}\n\n\t// Start the service\n\t_, err := runCommand(\"launchctl\", \"start\", ServiceLabel)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to start service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) launchdStop() error {\n\trunning, _ := m.launchdIsRunning()\n\tif !running {\n\t\treturn fmt.Errorf(\"service is not running\")\n\t}\n\n\t_, err := runCommand(\"launchctl\", \"stop\", ServiceLabel)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to stop service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) launchdRestart() error {\n\tif !m.launchdIsInstalled() {\n\t\treturn fmt.Errorf(\"service not installed. Run 'ch-ui service install' first\")\n\t}\n\n\t// Stop if running\n\trunning, _ := m.launchdIsRunning()\n\tif running {\n\t\trunCommand(\"launchctl\", \"stop\", ServiceLabel)\n\t}\n\n\t// Start the service\n\t_, err := runCommand(\"launchctl\", \"start\", ServiceLabel)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to restart service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) launchdStatus() (string, error) {\n\tif !m.launchdIsInstalled() {\n\t\treturn \"not installed\", nil\n\t}\n\n\toutput, _ := runCommand(\"launchctl\", \"list\")\n\tlines := strings.Split(output, \"\\n\")\n\tfor _, line := range lines {\n\t\tif strings.Contains(line, ServiceLabel) {\n\t\t\tparts := strings.Fields(line)\n\t\t\tif len(parts) >= 2 {\n\t\t\t\tpid := parts[0]\n\t\t\t\tstatus := parts[1]\n\t\t\t\tif pid != \"-\" {\n\t\t\t\t\treturn fmt.Sprintf(\"running (PID: %s)\", pid), nil\n\t\t\t\t}\n\t\t\t\tif status != \"0\" {\n\t\t\t\t\treturn fmt.Sprintf(\"stopped (last exit: %s)\", status), nil\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn \"stopped\", nil\n\t\t}\n\t}\n\n\treturn \"not running\", nil\n}\n\nfunc (m *Manager) launchdLogs(follow bool, lines int) error 
{\n\tlogPath := m.launchdLogPath()\n\n\tif !fileExists(logPath) {\n\t\tfmt.Println(\"No logs found yet. Service may not have started.\")\n\t\treturn nil\n\t}\n\n\tif follow {\n\t\tcmd := exec.Command(\"tail\", \"-f\", \"-n\", strconv.Itoa(lines), logPath)\n\t\tcmd.Stdout = os.Stdout\n\t\tcmd.Stderr = os.Stderr\n\t\treturn cmd.Run()\n\t}\n\n\tcmd := exec.Command(\"tail\", \"-n\", strconv.Itoa(lines), logPath)\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\treturn cmd.Run()\n}\n"
  },
  {
    "path": "connector/service/service.go",
    "content": "package service\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"os/exec\"\n\t\"path/filepath\"\n\t\"runtime\"\n\t\"strings\"\n)\n\nconst (\n\tServiceName      = \"ch-ui\"\n\tServiceLabel     = \"com.ch-ui\"\n\tBinaryPath       = \"/usr/local/bin/ch-ui\"\n\tSystemConfigDir  = \"/etc/ch-ui\"\n\tSystemConfigPath = \"/etc/ch-ui/config.yaml\"\n)\n\n// Manager provides cross-platform service management\ntype Manager struct {\n\tplatform string\n}\n\n// New creates a new service manager for the current platform\nfunc New() *Manager {\n\treturn &Manager{platform: runtime.GOOS}\n}\n\n// IsInstalled checks if the service is installed\nfunc (m *Manager) IsInstalled() bool {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdIsInstalled()\n\tcase \"linux\":\n\t\treturn m.systemdIsInstalled()\n\tdefault:\n\t\treturn false\n\t}\n}\n\n// IsRunning checks if the service is currently running\nfunc (m *Manager) IsRunning() (bool, error) {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdIsRunning()\n\tcase \"linux\":\n\t\treturn m.systemdIsRunning()\n\tdefault:\n\t\treturn false, fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Install installs the service\nfunc (m *Manager) Install(configPath string) error {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdInstall(configPath)\n\tcase \"linux\":\n\t\treturn m.systemdInstall(configPath)\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Uninstall removes the service\nfunc (m *Manager) Uninstall() error {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdUninstall()\n\tcase \"linux\":\n\t\treturn m.systemdUninstall()\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Start starts the service\nfunc (m *Manager) Start() error {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdStart()\n\tcase \"linux\":\n\t\treturn m.systemdStart()\n\tdefault:\n\t\treturn 
fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Stop stops the service\nfunc (m *Manager) Stop() error {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdStop()\n\tcase \"linux\":\n\t\treturn m.systemdStop()\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Restart restarts the service\nfunc (m *Manager) Restart() error {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdRestart()\n\tcase \"linux\":\n\t\treturn m.systemdRestart()\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Status returns the service status as a string\nfunc (m *Manager) Status() (string, error) {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdStatus()\n\tcase \"linux\":\n\t\treturn m.systemdStatus()\n\tdefault:\n\t\treturn \"\", fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// Logs returns recent service logs\nfunc (m *Manager) Logs(follow bool, lines int) error {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\treturn m.launchdLogs(follow, lines)\n\tcase \"linux\":\n\t\treturn m.systemdLogs(follow, lines)\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported platform: %s\", m.platform)\n\t}\n}\n\n// GetLogPath returns the path to the log file (for macOS)\nfunc (m *Manager) GetLogPath() string {\n\tswitch m.platform {\n\tcase \"darwin\":\n\t\thome, _ := os.UserHomeDir()\n\t\treturn filepath.Join(home, \"Library\", \"Logs\", \"ch-ui\", \"agent.log\")\n\tcase \"linux\":\n\t\treturn \"\" // Uses journald\n\tdefault:\n\t\treturn \"\"\n\t}\n}\n\n// Platform returns the current platform\nfunc (m *Manager) Platform() string {\n\treturn m.platform\n}\n\n// NeedsSudo returns true if sudo is needed for service operations\nfunc (m *Manager) NeedsSudo() bool {\n\t// macOS launchd user agents don't need sudo\n\t// Linux systemd system services need sudo\n\treturn m.platform == \"linux\"\n}\n\n// runCommand runs a command and returns combined 
output\nfunc runCommand(name string, args ...string) (string, error) {\n\tcmd := exec.Command(name, args...)\n\toutput, err := cmd.CombinedOutput()\n\treturn strings.TrimSpace(string(output)), err\n}\n\n// runCommandWithSudo runs a command with sudo if needed\nfunc runCommandWithSudo(needsSudo bool, name string, args ...string) (string, error) {\n\tif needsSudo && os.Geteuid() != 0 {\n\t\targs = append([]string{name}, args...)\n\t\tname = \"sudo\"\n\t}\n\treturn runCommand(name, args...)\n}\n\n// fileExists checks if a file exists\nfunc fileExists(path string) bool {\n\t_, err := os.Stat(path)\n\treturn err == nil\n}\n\n// GetConfigPath returns the appropriate config path based on platform\nfunc GetConfigPath() string {\n\tswitch runtime.GOOS {\n\tcase \"darwin\":\n\t\thome, _ := os.UserHomeDir()\n\t\treturn filepath.Join(home, \".config\", \"ch-ui\", \"config.yaml\")\n\tdefault:\n\t\treturn SystemConfigPath\n\t}\n}\n\n// GetConfigDir returns the appropriate config directory based on platform\nfunc GetConfigDir() string {\n\tswitch runtime.GOOS {\n\tcase \"darwin\":\n\t\thome, _ := os.UserHomeDir()\n\t\treturn filepath.Join(home, \".config\", \"ch-ui\")\n\tdefault:\n\t\treturn SystemConfigDir\n\t}\n}\n"
  },
  {
    "path": "connector/service/systemd.go",
    "content": "package service\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"os/exec\"\n\t\"strconv\"\n\t\"strings\"\n)\n\nconst systemdServiceTemplate = `[Unit]\nDescription=CH-UI Tunnel\nDocumentation=https://ch-ui.com/docs\nAfter=network-online.target\nWants=network-online.target\n\n[Service]\nType=simple\nExecStart=%s connect --config %s\nRestart=always\nRestartSec=5\nStandardOutput=journal\nStandardError=journal\nSyslogIdentifier=%s\n\n# Security hardening\nNoNewPrivileges=true\nProtectSystem=strict\nProtectHome=read-only\nPrivateTmp=true\nReadWritePaths=%s\n\n[Install]\nWantedBy=multi-user.target\n`\n\nconst systemdServicePath = \"/etc/systemd/system/ch-ui.service\"\n\nfunc (m *Manager) systemdIsInstalled() bool {\n\treturn fileExists(systemdServicePath)\n}\n\nfunc (m *Manager) systemdIsRunning() (bool, error) {\n\toutput, err := runCommand(\"systemctl\", \"is-active\", ServiceName)\n\tif err != nil {\n\t\treturn false, nil // Not running or not installed\n\t}\n\treturn strings.TrimSpace(output) == \"active\", nil\n}\n\nfunc (m *Manager) systemdInstall(configPath string) error {\n\t// Create config directory with proper permissions\n\tconfigDir := SystemConfigDir\n\tif err := os.MkdirAll(configDir, 0755); err != nil {\n\t\t// Try with sudo\n\t\t_, err = runCommandWithSudo(true, \"mkdir\", \"-p\", configDir)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to create config directory: %w\", err)\n\t\t}\n\t}\n\n\t// Generate service content\n\tserviceContent := fmt.Sprintf(systemdServiceTemplate,\n\t\tBinaryPath,\n\t\tconfigPath,\n\t\tServiceName,\n\t\tconfigDir,\n\t)\n\n\t// Write service file (needs sudo)\n\ttmpFile := \"/tmp/ch-ui-agent.service\"\n\tif err := os.WriteFile(tmpFile, []byte(serviceContent), 0644); err != nil {\n\t\treturn fmt.Errorf(\"failed to write service file: %w\", err)\n\t}\n\tdefer os.Remove(tmpFile)\n\n\t// Move to systemd directory\n\t_, err := runCommandWithSudo(true, \"mv\", tmpFile, systemdServicePath)\n\tif err != nil {\n\t\treturn 
fmt.Errorf(\"failed to install service file: %w\", err)\n\t}\n\n\t// Set permissions\n\trunCommandWithSudo(true, \"chmod\", \"644\", systemdServicePath)\n\n\t// Reload systemd\n\t_, err = runCommandWithSudo(true, \"systemctl\", \"daemon-reload\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to reload systemd: %w\", err)\n\t}\n\n\t// Enable the service\n\t_, err = runCommandWithSudo(true, \"systemctl\", \"enable\", ServiceName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to enable service: %w\", err)\n\t}\n\n\t// Start the service\n\t_, err = runCommandWithSudo(true, \"systemctl\", \"start\", ServiceName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to start service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) systemdUninstall() error {\n\t// Stop the service\n\trunCommandWithSudo(true, \"systemctl\", \"stop\", ServiceName)\n\n\t// Disable the service\n\trunCommandWithSudo(true, \"systemctl\", \"disable\", ServiceName)\n\n\t// Remove service file\n\tif fileExists(systemdServicePath) {\n\t\t_, err := runCommandWithSudo(true, \"rm\", systemdServicePath)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to remove service file: %w\", err)\n\t\t}\n\t}\n\n\t// Reload systemd\n\trunCommandWithSudo(true, \"systemctl\", \"daemon-reload\")\n\n\treturn nil\n}\n\nfunc (m *Manager) systemdStart() error {\n\tif !m.systemdIsInstalled() {\n\t\treturn fmt.Errorf(\"service not installed. 
Run 'ch-ui service install' first\")\n\t}\n\n\trunning, _ := m.systemdIsRunning()\n\tif running {\n\t\treturn fmt.Errorf(\"service is already running\")\n\t}\n\n\t_, err := runCommandWithSudo(true, \"systemctl\", \"start\", ServiceName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to start service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) systemdStop() error {\n\trunning, _ := m.systemdIsRunning()\n\tif !running {\n\t\treturn fmt.Errorf(\"service is not running\")\n\t}\n\n\t_, err := runCommandWithSudo(true, \"systemctl\", \"stop\", ServiceName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to stop service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) systemdRestart() error {\n\tif !m.systemdIsInstalled() {\n\t\treturn fmt.Errorf(\"service not installed. Run 'ch-ui service install' first\")\n\t}\n\n\t_, err := runCommandWithSudo(true, \"systemctl\", \"restart\", ServiceName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to restart service: %w\", err)\n\t}\n\n\treturn nil\n}\n\nfunc (m *Manager) systemdStatus() (string, error) {\n\tif !m.systemdIsInstalled() {\n\t\treturn \"not installed\", nil\n\t}\n\n\toutput, _ := runCommand(\"systemctl\", \"is-active\", ServiceName)\n\tstatus := strings.TrimSpace(output)\n\n\tswitch status {\n\tcase \"active\":\n\t\t// Get more details\n\t\tdetailOutput, _ := runCommand(\"systemctl\", \"show\", ServiceName, \"--property=MainPID,ActiveEnterTimestamp\")\n\t\tvar pid, since string\n\t\tfor _, line := range strings.Split(detailOutput, \"\\n\") {\n\t\t\tif strings.HasPrefix(line, \"MainPID=\") {\n\t\t\t\tpid = strings.TrimPrefix(line, \"MainPID=\")\n\t\t\t}\n\t\t\tif strings.HasPrefix(line, \"ActiveEnterTimestamp=\") {\n\t\t\t\tsince = strings.TrimPrefix(line, \"ActiveEnterTimestamp=\")\n\t\t\t}\n\t\t}\n\t\tif pid != \"\" && pid != \"0\" {\n\t\t\tif since != \"\" {\n\t\t\t\treturn fmt.Sprintf(\"running (PID: %s, since: %s)\", pid, since), nil\n\t\t\t}\n\t\t\treturn fmt.Sprintf(\"running (PID: 
%s)\", pid), nil\n\t\t}\n\t\treturn \"running\", nil\n\tcase \"inactive\":\n\t\treturn \"stopped\", nil\n\tcase \"failed\":\n\t\treturn \"failed (check logs with: ch-ui service logs)\", nil\n\tdefault:\n\t\treturn status, nil\n\t}\n}\n\nfunc (m *Manager) systemdLogs(follow bool, lines int) error {\n\targs := []string{\"-u\", ServiceName, \"-n\", strconv.Itoa(lines)}\n\tif follow {\n\t\targs = append(args, \"-f\")\n\t}\n\n\tcmd := exec.Command(\"journalctl\", args...)\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\treturn cmd.Run()\n}\n"
  },
  {
    "path": "connector/ui/ui.go",
    "content": "package ui\n\nimport (\n\t\"fmt\"\n\t\"io\"\n\t\"os\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/fatih/color\"\n)\n\n// UI handles formatted terminal output\ntype UI struct {\n\tout      io.Writer\n\tnoColor  bool\n\tquiet    bool\n\tverbose  bool\n\tjsonMode bool\n\n\tgreen   *color.Color\n\tred     *color.Color\n\tyellow  *color.Color\n\tcyan    *color.Color\n\tblue    *color.Color\n\tmagenta *color.Color\n\tbold    *color.Color\n\tdim     *color.Color\n}\n\n// New creates a new UI instance\nfunc New(noColor, quiet, verbose, jsonMode bool) *UI {\n\tif noColor {\n\t\tcolor.NoColor = true\n\t}\n\n\treturn &UI{\n\t\tout:      os.Stdout,\n\t\tnoColor:  noColor,\n\t\tquiet:    quiet,\n\t\tverbose:  verbose,\n\t\tjsonMode: jsonMode,\n\t\tgreen:    color.New(color.FgGreen),\n\t\tred:      color.New(color.FgRed),\n\t\tyellow:   color.New(color.FgYellow),\n\t\tcyan:     color.New(color.FgCyan),\n\t\tblue:     color.New(color.FgBlue),\n\t\tmagenta:  color.New(color.FgMagenta),\n\t\tbold:     color.New(color.Bold),\n\t\tdim:      color.New(color.Faint),\n\t}\n}\n\n// Logo prints the CH-UI ASCII art logo\nfunc (u *UI) Logo(version string) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\n\tlogo := `\n  ██████╗██╗  ██╗      ██╗   ██╗██╗\n ██╔════╝██║  ██║      ██║   ██║██║\n ██║     ███████║█████╗██║   ██║██║\n ██║     ██╔══██║╚════╝██║   ██║██║\n ╚██████╗██║  ██║      ╚██████╔╝██║\n  ╚═════╝╚═╝  ╚═╝       ╚═════╝ ╚═╝`\n\n\tu.cyan.Println(logo)\n\tu.dim.Printf(\"  Tunnel %s\\n\\n\", version)\n}\n\n// Info prints an info message\nfunc (u *UI) Info(format string, args ...interface{}) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\tu.cyan.Print(\"→ \")\n\tfmt.Fprintf(u.out, format+\"\\n\", args...)\n}\n\n// Success prints a success message\nfunc (u *UI) Success(format string, args ...interface{}) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\tu.green.Print(\"✓ \")\n\tfmt.Fprintf(u.out, format+\"\\n\", args...)\n}\n\n// Error prints an error 
message\nfunc (u *UI) Error(format string, args ...interface{}) {\n\tif u.jsonMode {\n\t\treturn\n\t}\n\tu.red.Print(\"✗ \")\n\tfmt.Fprintf(os.Stderr, format+\"\\n\", args...)\n}\n\n// ErrorType represents the category of error\ntype ErrorType string\n\nconst (\n\tErrorTypeNetwork ErrorType = \"NETWORK\"\n\tErrorTypeAuth    ErrorType = \"AUTH\"\n\tErrorTypeServer  ErrorType = \"SERVER\"\n\tErrorTypeConfig  ErrorType = \"CONFIG\"\n\tErrorTypeUnknown ErrorType = \"UNKNOWN\"\n)\n\n// DiagnosticError prints a detailed error with source, type, and suggestions\nfunc (u *UI) DiagnosticError(errType ErrorType, source, message string, suggestions []string) {\n\tif u.jsonMode {\n\t\treturn\n\t}\n\n\tfmt.Fprintln(os.Stderr)\n\n\t// Error header with type badge\n\tu.red.Fprint(os.Stderr, \"┌─ ERROR \")\n\tu.dim.Fprintf(os.Stderr, \"[%s]\\n\", errType)\n\n\t// Source\n\tu.red.Fprint(os.Stderr, \"│\\n\")\n\tu.red.Fprint(os.Stderr, \"│  \")\n\tu.bold.Fprint(os.Stderr, \"Source: \")\n\tfmt.Fprintln(os.Stderr, source)\n\n\t// Message\n\tu.red.Fprint(os.Stderr, \"│  \")\n\tu.bold.Fprint(os.Stderr, \"Error:  \")\n\tfmt.Fprintln(os.Stderr, message)\n\n\t// Suggestions\n\tif len(suggestions) > 0 {\n\t\tu.red.Fprint(os.Stderr, \"│\\n\")\n\t\tu.red.Fprint(os.Stderr, \"│  \")\n\t\tu.yellow.Fprintln(os.Stderr, \"Possible causes:\")\n\t\tfor _, s := range suggestions {\n\t\t\tu.red.Fprint(os.Stderr, \"│    \")\n\t\t\tu.dim.Fprint(os.Stderr, \"• \")\n\t\t\tfmt.Fprintln(os.Stderr, s)\n\t\t}\n\t}\n\n\tu.red.Fprint(os.Stderr, \"│\\n\")\n\tu.red.Fprintln(os.Stderr, \"└─\")\n\tfmt.Fprintln(os.Stderr)\n}\n\n// AuthError prints an authentication-specific error with helpful context\nfunc (u *UI) AuthError(serverMessage string) {\n\tsource := \"CH-UI Server\"\n\tvar suggestions []string\n\n\t// Classify the error and provide specific suggestions\n\tswitch {\n\tcase strings.Contains(strings.ToLower(serverMessage), \"invalid\") && strings.Contains(strings.ToLower(serverMessage), 
\"token\"):\n\t\tsuggestions = []string{\n\t\t\t\"The tunnel token is invalid or has been revoked\",\n\t\t\t\"Check that you copied the complete token (starts with 'cht_')\",\n\t\t\t\"Generate a new token on the server with: ch-ui tunnel create --name <connection-name>\",\n\t\t\t\"Verify the token belongs to the target server instance\",\n\t\t}\n\tcase strings.Contains(strings.ToLower(serverMessage), \"license\"):\n\t\tsuggestions = []string{\n\t\t\t\"The server license may have expired\",\n\t\t\t\"Contact your administrator to renew the license\",\n\t\t\t\"Check server logs for license validation details\",\n\t\t}\n\tcase strings.Contains(strings.ToLower(serverMessage), \"already connected\"):\n\t\tsuggestions = []string{\n\t\t\t\"Another agent process is already connected with this token\",\n\t\t\t\"Stop the existing process or service before starting a new one\",\n\t\t\t\"Use 'ch-ui service status' to check service mode\",\n\t\t\t\"Reconnect with '--takeover' to replace the active session\",\n\t\t}\n\tcase strings.Contains(strings.ToLower(serverMessage), \"not found\"):\n\t\tsuggestions = []string{\n\t\t\t\"The organization associated with this token may have been deleted\",\n\t\t\t\"The tunnel connection may have been removed\",\n\t\t\t\"Contact your administrator\",\n\t\t}\n\tdefault:\n\t\tsuggestions = []string{\n\t\t\t\"Check that your token is valid and not expired\",\n\t\t\t\"Verify the tunnel URL is correct\",\n\t\t\t\"Check CH-UI server logs for tunnel auth errors\",\n\t\t}\n\t}\n\n\tu.DiagnosticError(ErrorTypeAuth, source, serverMessage, suggestions)\n}\n\n// ConnectionError prints a connection-specific error\nfunc (u *UI) ConnectionError(err error, tunnelURL string) {\n\tsource := fmt.Sprintf(\"Connection to %s\", tunnelURL)\n\tmessage := err.Error()\n\tvar suggestions []string\n\n\tswitch {\n\tcase strings.Contains(message, \"connection refused\"):\n\t\tsuggestions = []string{\n\t\t\t\"The CH-UI server may be down or unreachable\",\n\t\t\t\"Check if 
the tunnel URL is correct: \" + tunnelURL,\n\t\t\t\"Verify your network/firewall allows outbound WebSocket connections\",\n\t\t\t\"If using a custom server, ensure it's running\",\n\t\t}\n\tcase strings.Contains(message, \"no such host\") || strings.Contains(message, \"lookup\"):\n\t\tsuggestions = []string{\n\t\t\t\"Cannot resolve the tunnel server hostname\",\n\t\t\t\"Check your DNS settings\",\n\t\t\t\"Verify the tunnel URL is correct: \" + tunnelURL,\n\t\t}\n\tcase strings.Contains(message, \"timeout\") || strings.Contains(message, \"deadline\"):\n\t\tsuggestions = []string{\n\t\t\t\"Connection timed out - server may be overloaded or unreachable\",\n\t\t\t\"Check your network connection\",\n\t\t\t\"Try again in a few moments\",\n\t\t}\n\tcase strings.Contains(message, \"certificate\") || strings.Contains(message, \"tls\"):\n\t\tsuggestions = []string{\n\t\t\t\"SSL/TLS certificate error\",\n\t\t\t\"If using a self-signed certificate, this is expected in dev mode\",\n\t\t\t\"Verify the tunnel URL protocol (ws:// vs wss://)\",\n\t\t}\n\tdefault:\n\t\tsuggestions = []string{\n\t\t\t\"Check your network connection\",\n\t\t\t\"Verify the tunnel URL: \" + tunnelURL,\n\t\t\t\"Try running with --verbose for more details\",\n\t\t}\n\t}\n\n\tu.DiagnosticError(ErrorTypeNetwork, source, message, suggestions)\n}\n\n// Warn prints a warning message\nfunc (u *UI) Warn(format string, args ...interface{}) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\tu.yellow.Print(\"! 
\")\n\tfmt.Fprintf(u.out, format+\"\\n\", args...)\n}\n\n// Debug prints a debug message (only in verbose mode)\nfunc (u *UI) Debug(format string, args ...interface{}) {\n\tif !u.verbose || u.jsonMode {\n\t\treturn\n\t}\n\tu.dim.Printf(\"[debug] \"+format+\"\\n\", args...)\n}\n\n// Status prints the connection status block\nfunc (u *UI) Status(tunnelURL, clickhouseURL string, uptime time.Duration) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\n\tfmt.Println()\n\tu.bold.Println(\"  Status:     \", u.green.Sprint(\"Connected\"))\n\tfmt.Printf(\"  Tunnel:     %s\\n\", tunnelURL)\n\tfmt.Printf(\"  ClickHouse: %s\\n\", clickhouseURL)\n\tfmt.Printf(\"  Uptime:     %s\\n\", formatDuration(uptime))\n\tfmt.Println()\n\tu.dim.Println(\"Press Ctrl+C to disconnect\")\n\tfmt.Println()\n}\n\n// QueryLog prints a query execution log line\nfunc (u *UI) QueryLog(queryID string, elapsed time.Duration, rows int) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\n\ttimestamp := time.Now().Format(\"2006-01-02 15:04:05\")\n\tu.dim.Printf(\"[%s] \", timestamp)\n\tfmt.Printf(\"Query %s executed \", u.cyan.Sprint(queryID[:8]))\n\tu.dim.Printf(\"(%s, %s rows)\\n\", elapsed.Round(time.Millisecond), formatNumber(rows))\n}\n\n// QueryError prints a query error log line\nfunc (u *UI) QueryError(queryID string, err error) {\n\tif u.jsonMode {\n\t\treturn\n\t}\n\n\ttimestamp := time.Now().Format(\"2006-01-02 15:04:05\")\n\tu.dim.Printf(\"[%s] \", timestamp)\n\tu.red.Printf(\"Query %s failed: %v\\n\", queryID[:8], err)\n}\n\n// Disconnected prints a disconnection message\nfunc (u *UI) Disconnected(reason string) {\n\tif u.jsonMode {\n\t\treturn\n\t}\n\tu.yellow.Print(\"! 
\")\n\tfmt.Printf(\"Disconnected: %s\\n\", reason)\n}\n\n// Reconnecting prints a reconnection message\nfunc (u *UI) Reconnecting(delay time.Duration) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\tu.cyan.Print(\"→ \")\n\tfmt.Printf(\"Reconnecting in %s...\\n\", delay.Round(time.Millisecond))\n}\n\n// Box prints a boxed message\nfunc (u *UI) Box(title string, lines map[string]string, order []string) {\n\tif u.quiet || u.jsonMode {\n\t\treturn\n\t}\n\n\tfmt.Println()\n\tu.bold.Println(title)\n\tfmt.Println(strings.Repeat(\"─\", len(title)+2))\n\n\tfor _, key := range order {\n\t\tif val, ok := lines[key]; ok {\n\t\t\tfmt.Printf(\"  %-12s %s\\n\", key+\":\", val)\n\t\t}\n\t}\n\tfmt.Println()\n}\n\n// Helpers\n\nfunc formatDuration(d time.Duration) string {\n\tif d < time.Minute {\n\t\treturn fmt.Sprintf(\"%ds\", int(d.Seconds()))\n\t}\n\tif d < time.Hour {\n\t\treturn fmt.Sprintf(\"%dm %ds\", int(d.Minutes()), int(d.Seconds())%60)\n\t}\n\treturn fmt.Sprintf(\"%dh %dm\", int(d.Hours()), int(d.Minutes())%60)\n}\n\nfunc formatNumber(n int) string {\n\tif n < 1000 {\n\t\treturn fmt.Sprintf(\"%d\", n)\n\t}\n\tif n < 1000000 {\n\t\treturn fmt.Sprintf(\"%.1fK\", float64(n)/1000)\n\t}\n\treturn fmt.Sprintf(\"%.1fM\", float64(n)/1000000)\n}\n\n// FormatBytes formats bytes to human readable format\nfunc FormatBytes(b uint64) string {\n\tconst unit = 1024\n\tif b < unit {\n\t\treturn fmt.Sprintf(\"%d B\", b)\n\t}\n\tdiv, exp := uint64(unit), 0\n\tfor n := b / unit; n >= unit; n /= unit {\n\t\tdiv *= unit\n\t\texp++\n\t}\n\treturn fmt.Sprintf(\"%.1f %cB\", float64(b)/float64(div), \"KMGTPE\"[exp])\n}\n"
  },
  {
    "path": "docs/brain/SKILLS.md",
    "content": "# Brain Skills\n\nThis file defines the default instruction set used by Brain across all chats.\nAdmins can copy this content into the Brain Skills editor and create variants.\n\n## Role\n\nYou are **Brain**, a senior ClickHouse analytics copilot.\n\n## Main goals\n\n- Produce correct, executable ClickHouse SQL.\n- Help users move from question -> query -> insight quickly.\n- Stay concise and explicit about assumptions.\n\n## SQL rules\n\n- Prefer read-only exploration first.\n- Use `LIMIT 100` by default for exploratory selects.\n- Avoid `SELECT *` on large tables unless explicitly requested.\n- Always qualify tables with backticks when needed (for example: `` `db.table` ``).\n- If the request is ambiguous, ask one targeted clarification question.\n\n## Safety rules\n\n- Do not suggest destructive SQL (DROP/TRUNCATE/DELETE/ALTER) unless the user asks directly.\n- If the user asks for destructive SQL, include a short warning and confirmation step.\n- For expensive queries, provide a preview query first (sample, top-N, or date-bounded window).\n\n## Artifact contract\n\nWhen query output or derived assets exist, create or reference artifacts with stable titles:\n\n- `SQL Draft: <topic>`\n- `Query Result: <topic>`\n- `Insight Summary: <topic>`\n- `Chart Spec: <topic>`\n\nEach artifact should include:\n\n- Purpose (1 line)\n- Inputs used (query/message references)\n- Output payload (JSON/text/SQL)\n\n## Query tool contract\n\nWhen running SQL tools:\n\n1. Start with read-only SQL.\n2. Keep runtime bounded (small scans first).\n3. Persist output as an artifact.\n4. Summarize findings in 3-5 bullets.\n\n## Response format\n\nDefault assistant response structure:\n\n1. One-line intent confirmation.\n2. SQL block when applicable.\n3. Short explanation.\n4. Optional next-step variants.\n\n## Example pattern\n\n````text\nGot it. 
You want daily active users by region for the last 30 days.\n\n```sql\nSELECT\n  toDate(event_time) AS day,\n  region,\n  uniq(user_id) AS dau\nFROM `analytics.events`\nWHERE event_time >= now() - INTERVAL 30 DAY\nGROUP BY day, region\nORDER BY day DESC, dau DESC\nLIMIT 100\n```\n\nThis computes DAU by region and keeps the result bounded for quick validation.\nIf you want, I can also return a stacked timeseries version.\n````\n"
  },
  {
    "path": "docs/cant-login.md",
    "content": "# Can't Login?\n\nUse this guide when CH-UI loads but sign-in fails, local connection is wrong, or you are blocked by retry windows.\n\n## Quick Diagnosis\n\n| What you see | Most likely cause | What to do |\n|---|---|---|\n| `Authentication failed` | Wrong ClickHouse username/password | Retry with correct credentials for the selected connection |\n| `Connection unavailable` / `Unreachable` | Local ClickHouse URL is wrong or connector is offline | Update local URL/name, restart CH-UI, then retry |\n| `Too many login attempts` | Repeated failed attempts triggered temporary lock | Wait retry window; if URL/connection was wrong, fix setup and restart before retrying |\n| No connections configured | Embedded local connection was not created/updated correctly | Run setup command below and restart CH-UI |\n\n## Local Recovery (Recommended)\n\n1. Open **Can't login?** in CH-UI login.\n2. Set:\n   - `ClickHouse URL`\n   - `Connection Name`\n3. Restart CH-UI with one of these commands.\n\nGlobal install:\n\n```bash\nch-ui server --clickhouse-url 'http://127.0.0.1:8123' --connection-name 'My Connection 1'\n```\n\nLocal binary:\n\n```bash\n./ch-ui server --clickhouse-url 'http://127.0.0.1:8123' --connection-name 'My Connection 1'\n```\n\nThen open `http://localhost:3488` and sign in again.\n\n## Docker Recovery\n\n```bash\ndocker run --rm \\\n  -p 3488:3488 \\\n  -v ch-ui-data:/app/data \\\n  -e CLICKHOUSE_URL='http://127.0.0.1:8123' \\\n  -e CONNECTION_NAME='My Connection 1' \\\n  ghcr.io/caioricciuti/ch-ui:latest\n```\n\n## Env And Config Alternatives\n\nEnvironment variables:\n\n```bash\nCLICKHOUSE_URL='http://127.0.0.1:8123' CONNECTION_NAME='My Connection 1' ch-ui server\n```\n\nConfig file (`server.yaml`):\n\n```yaml\nclickhouse_url: http://127.0.0.1:8123\nconnection_name: My Connection 1\n```\n\n## Notes\n\n- Local URL setup does **not** require Admin access.\n- Admin and multi-connection management are Pro-only features.\n- Setup commands intentionally 
exclude passwords; credentials stay in the Sign in form.\n- Connection name precedence: `--connection-name` > `CONNECTION_NAME` > `server.yaml` > `Local ClickHouse`.\n"
  },
  {
    "path": "docs/legal/privacy-policy.md",
    "content": "# Privacy Policy\n\n**Effective date:** February 12, 2026\n**Last updated:** February 12, 2026\n\nCH-UI (\"we\", \"our\", \"us\") is developed by Caio Ricciuti. This privacy policy explains how we handle data when you use CH-UI software.\n\n---\n\n## What CH-UI does NOT collect\n\nCH-UI is a self-hosted application. When you run CH-UI on your own infrastructure:\n\n- **No telemetry** is sent to us or any third party\n- **No usage data** leaves your server\n- **No analytics** are collected\n- **No cookies** are set by us (only session cookies for your own login)\n- **Your queries, data, and database contents never leave your infrastructure**\n\n## Data stored locally\n\nCH-UI stores the following data in a local SQLite database on your server:\n\n- **User sessions** — login tokens for authenticated access\n- **Saved queries** — queries you choose to save\n- **Dashboard configurations** — layout and panel settings (Pro)\n- **Scheduled jobs** — query schedules you create (Pro)\n- **Connection settings** — ClickHouse connection details (encrypted)\n- **License information** — your license key if you activate Pro\n- **Application settings** — preferences and configuration\n\nAll data is stored in the SQLite file specified by `database_path` in your config (default: `./data/ch-ui.db`). You have full control over this data.\n\n## Pro license activation\n\nWhen you activate a Pro license, the license file is stored locally in your database. 
No information is sent to external servers during activation — the license is validated offline using cryptographic signatures.\n\n## Managed hosting\n\nIf you use a managed CH-UI hosting offering:\n\n- We may store your account information (email, name) for authentication\n- We may store your ClickHouse connection metadata (not your database contents)\n- We do not access, read, or store your ClickHouse data\n- Tunnel connections are end-to-end between your agent and your browser session\n\n## Third-party services\n\nThe self-hosted CH-UI binary does not communicate with any third-party services except:\n\n- **Your ClickHouse server** — as configured by you\n- **OpenAI API** — only if you configure the Brain AI feature (Pro) with your own API key\n\n## Data deletion\n\nSince all data is stored locally:\n\n- Delete the SQLite database file to remove all application data\n- Uninstall the binary to fully remove CH-UI\n\n## Contact\n\nFor privacy questions: **c.ricciuti@ch-ui.com**\n\n## Changes\n\nWe may update this policy. Changes will be posted in this file and noted in release changelogs.\n"
  },
  {
    "path": "docs/legal/terms-of-service.md",
    "content": "# Terms of Service\n\n**Effective date:** February 12, 2026\n**Last updated:** February 12, 2026\n\nThese terms govern your use of CH-UI software developed by Caio Ricciuti.\n\n---\n\n## 1. Software license\n\nCH-UI is distributed under a dual-license model:\n\n- **CH-UI Core** (Community Edition) is licensed under the [Apache License 2.0](../../LICENSE). You may use, modify, and distribute it freely under those terms.\n- **CH-UI Pro** modules require a separate commercial license. Pro features are clearly marked in the application and documentation.\n\n## 2. Self-hosted usage\n\nWhen you run CH-UI on your own infrastructure:\n\n- You are responsible for your own data, backups, and security\n- You are responsible for compliance with applicable laws in your jurisdiction\n- We provide the software \"as is\" without warranty (see Section 6)\n\n## 3. Pro license\n\nIf you purchase a CH-UI Pro license:\n\n- The license grants you access to Pro features for the duration specified\n- Licenses are non-transferable unless agreed in writing\n- License terms are specified in the license file provided to you\n- Tampering with or circumventing license validation is prohibited\n\n## 4. Acceptable use\n\nYou agree not to:\n\n- Reverse-engineer the license validation mechanism\n- Redistribute Pro modules without authorization\n- Use CH-UI to violate applicable laws or regulations\n- Misrepresent CH-UI as your own product\n\n## 5. Intellectual property\n\n- CH-UI, the CH-UI logo, and related marks are the property of Caio Ricciuti\n- Open source components are governed by their respective licenses\n- Your data remains yours — we claim no ownership over data processed by CH-UI\n\n## 6. Disclaimer of warranty\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n## 7. Limitation of liability\n\nTo the maximum extent permitted by law, Caio Ricciuti shall not be liable for any indirect, incidental, special, consequential, or punitive damages, or any loss of profits or revenues, whether incurred directly or indirectly.\n\n## 8. Changes\n\nWe may update these terms. Continued use of the software after changes constitutes acceptance.\n\n## 9. Contact\n\nFor questions about these terms: **c.ricciuti@ch-ui.com**\n"
  },
  {
    "path": "docs/license.md",
    "content": "# CH-UI Licensing\n\nCH-UI uses a dual-license model: open source core + commercial Pro modules.\n\n---\n\n## CH-UI Core (Community Edition)\n\n**License:** [Apache License 2.0](../LICENSE)\n\nThe core of CH-UI is free and open source. This includes:\n\n- SQL Editor (multi-tab, formatting, profiling, streaming results, query plan analysis)\n- Schema Explorer (database/table/column browser, data preview)\n- Saved Queries\n- Dashboards (panel builder, multiple chart types, time ranges)\n- Brain AI Assistant (OpenAI, OpenAI-compatible, Ollama — multi-chat, artifacts, skills)\n- Data Pipelines (Webhook, S3, Kafka, Database sources into ClickHouse)\n- Models (dbt-style SQL transformations with DAG and materialization)\n- Admin Panel (user management, connection management, provider configuration)\n- Multi-connection management\n- Tunnel connector (`ch-ui connect`) for remote ClickHouse access\n- Embedded web frontend\n- All CLI commands\n\nYou can use, modify, and distribute CH-UI Core freely under the Apache 2.0 license.\n\n## CH-UI Pro\n\n**License:** Commercial (proprietary)\n\nPro modules extend CH-UI with enterprise features:\n\n- Scheduled query jobs (cron-based scheduling, execution history, timezone support)\n- Governance (metadata sync, query log analytics, data lineage, access matrix, tagging)\n- Policies and incident management (violation detection, incident workflow, severity tracking)\n- Alerting (SMTP, Resend, Brevo — rules by event type/severity, escalation)\n\nPro features require a valid license file. Licenses are per-deployment and include a customer name, expiration date, and feature set.\n\n### How to activate\n\n1. Open CH-UI in your browser\n2. Go to **Settings > License**\n3. Paste or upload your license file\n4. 
Pro features unlock immediately\n\n### How to get a license\n\nVisit [ch-ui.com/pricing](https://ch-ui.com/pricing) or contact **c.ricciuti@ch-ui.com**.\n\n## License boundary\n\nThe licensing boundary is enforced server-side via HTTP 402 middleware on Pro-only routes:\n\n- **Free routes:** queries, saved queries, dashboards, pipelines, models, brain, admin, connections\n- **Pro routes:** `/api/schedules/*`, `/api/governance/*` (including alerts)\n\nThe Pro license check is enforced both server-side (HTTP 402 middleware) and client-side (UI gate).\n\n## FAQ\n\n**Can I use CH-UI Core in production?**\nYes, freely. Apache 2.0 allows commercial use.\n\n**Can I modify CH-UI Core?**\nYes. You must retain the copyright notice and license.\n\n**Do I need Pro for dashboards, Brain, or pipelines?**\nNo. Dashboards, Brain AI, data pipelines, models, and admin are all free.\n\n**What features require Pro?**\nOnly scheduled query jobs, governance (lineage, policies, incidents, access matrix), and alerting.\n\n**What happens when a Pro license expires?**\nPro features become locked. Core features continue working. Your data is never lost.\n"
  },
  {
    "path": "docs/production-runbook.md",
    "content": "# CH-UI Production Runbook (VM2 Server + VM1 Connector)\n\nThis runbook covers a production topology where:\n\n- **VM2** runs `ch-ui server` (UI, API, tunnel gateway)\n- **VM1** runs `ch-ui connect` (agent next to ClickHouse)\n\n## 1. VM2 Server Hardening\n\n1. Create server config at `/etc/ch-ui/server.yaml`:\n\n```yaml\nport: 3488\napp_url: https://ch-ui.example.com\napp_secret_key: \"replace-with-long-random-secret\"\nallowed_origins:\n  - https://ch-ui.example.com\ndatabase_path: /var/lib/ch-ui/ch-ui.db\n```\n\n2. Keep runtime state in writable directories:\n\n```bash\nsudo mkdir -p /var/lib/ch-ui/run\nsudo mkdir -p /var/lib/ch-ui\nsudo chown -R chui:chui /var/lib/ch-ui\n```\n\n3. Use lifecycle commands with explicit PID file:\n\n```bash\nch-ui server start -c /etc/ch-ui/server.yaml --detach --pid-file /var/lib/ch-ui/run/ch-ui-server.pid\nch-ui server status -c /etc/ch-ui/server.yaml --pid-file /var/lib/ch-ui/run/ch-ui-server.pid\nch-ui server stop -c /etc/ch-ui/server.yaml --pid-file /var/lib/ch-ui/run/ch-ui-server.pid\n```\n\n## 2. VM2 systemd Service (recommended)\n\nCreate `/etc/systemd/system/ch-ui-server.service`:\n\n```ini\n[Unit]\nDescription=CH-UI Server\nAfter=network.target\n\n[Service]\nType=simple\nUser=chui\nGroup=chui\nWorkingDirectory=/var/lib/ch-ui\nExecStart=/usr/local/bin/ch-ui server start -c /etc/ch-ui/server.yaml --pid-file /var/lib/ch-ui/run/ch-ui-server.pid\nExecStop=/usr/local/bin/ch-ui server stop -c /etc/ch-ui/server.yaml --pid-file /var/lib/ch-ui/run/ch-ui-server.pid\nRestart=always\nRestartSec=5\nLimitNOFILE=65535\n\n[Install]\nWantedBy=multi-user.target\n```\n\nThen:\n\n```bash\nsudo systemctl daemon-reload\nsudo systemctl enable ch-ui-server\nsudo systemctl start ch-ui-server\nsudo systemctl status ch-ui-server\n```\n\n## 3. 
VM2 Reverse Proxy (TLS + WebSocket)\n\nYour proxy must:\n\n- route app traffic to `127.0.0.1:3488`\n- support WebSocket upgrades on `/connect`\n- keep long-enough timeouts for tunnel traffic\n\nUse the repo example: `ch-ui.conf`.\n\n## 4. VM1 Connector Setup\n\n1. On VM2, create a tunnel key for VM1:\n\n```bash\nch-ui tunnel create --name \"vm1-clickhouse\" -c /etc/ch-ui/server.yaml --url wss://ch-ui.example.com/connect\n```\n\nCopy the generated `cht_...` token.\n\n2. Install connector service on VM1:\n\n```bash\nsudo /usr/local/bin/ch-ui service install \\\n  --url wss://ch-ui.example.com/connect \\\n  --key cht_your_tunnel_token \\\n  --clickhouse-url http://127.0.0.1:8123\n```\n\n3. Verify:\n\n```bash\nch-ui service status\nch-ui service logs -f\n```\n\n4. (Optional) Rotate compromised/old token from VM2:\n\n```bash\nch-ui tunnel list -c /etc/ch-ui/server.yaml\nch-ui tunnel rotate <connection-id> -c /etc/ch-ui/server.yaml --url wss://ch-ui.example.com/connect\n```\n\n## 5. Network Policy\n\n- VM2 inbound: `443` (or your TLS port)\n- VM2 inbound: `3488` only from localhost/reverse-proxy path\n- VM1 outbound: allow to `wss://ch-ui.example.com/connect`\n- VM1 ClickHouse can stay local-only (`127.0.0.1:8123`)\n\n## 6. Monitoring and Backups\n\n1. Health endpoint:\n\n```bash\ncurl -fsS http://127.0.0.1:3488/health\n```\n\n2. Back up SQLite:\n\n- file: `/var/lib/ch-ui/ch-ui.db`\n- schedule daily snapshot + retention policy\n- verify restore procedure quarterly\n\n3. Log collection:\n\n- VM2: `journalctl -u ch-ui-server`\n- VM1: `ch-ui service logs` or platform service logs\n\n## 7. Upgrade Procedure\n\n1. Replace binaries on VM2 and VM1.\n2. Restart services:\n\n```bash\nsudo systemctl restart ch-ui-server\nch-ui service restart\n```\n\n3. Validate:\n\n```bash\nch-ui version\nch-ui server status -c /etc/ch-ui/server.yaml --pid-file /var/lib/ch-ui/run/ch-ui-server.pid\nch-ui service status\n```\n\n## 8. 
Notes on Older Binaries\n\nOlder builds did not support server lifecycle subcommands (`status/stop/restart`).\nIf `ch-ui server status` starts the server, replace the binary with a newer build and retry.\n"
  },
  {
    "path": "frontend.go",
    "content": "package main\n\nimport (\n\t\"embed\"\n\t\"io/fs\"\n\t\"log/slog\"\n)\n\n//go:embed all:ui/dist\nvar uiDistFS embed.FS\n\nfunc frontendFS() fs.FS {\n\tsub, err := fs.Sub(uiDistFS, \"ui/dist\")\n\tif err != nil {\n\t\tslog.Warn(\"Failed to open embedded frontend directory\", \"error\", err)\n\t\treturn nil\n\t}\n\treturn sub\n}\n"
  },
  {
    "path": "go.mod",
    "content": "module github.com/caioricciuti/ch-ui\n\ngo 1.25.0\n\nrequire (\n\tgithub.com/IBM/sarama v1.47.0\n\tgithub.com/fatih/color v1.18.0\n\tgithub.com/go-chi/chi/v5 v5.2.5\n\tgithub.com/go-sql-driver/mysql v1.9.3\n\tgithub.com/google/uuid v1.6.0\n\tgithub.com/gorilla/websocket v1.5.3\n\tgithub.com/lib/pq v1.11.2\n\tgithub.com/minio/minio-go/v7 v7.0.98\n\tgithub.com/spf13/cobra v1.10.2\n\tgithub.com/xdg-go/scram v1.2.0\n\tgithub.com/xitongsys/parquet-go v1.6.2\n\tgithub.com/xitongsys/parquet-go-source v0.0.0-20241021075129-b732d2ac9c9b\n\tgolang.org/x/crypto v0.48.0\n\tgopkg.in/yaml.v3 v3.0.1\n\tmodernc.org/sqlite v1.44.3\n)\n\nrequire (\n\tfilippo.io/edwards25519 v1.1.0 // indirect\n\tgithub.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516 // indirect\n\tgithub.com/apache/thrift v0.14.2 // indirect\n\tgithub.com/davecgh/go-spew v1.1.1 // indirect\n\tgithub.com/dustin/go-humanize v1.0.1 // indirect\n\tgithub.com/eapache/go-resiliency v1.7.0 // indirect\n\tgithub.com/eapache/queue v1.1.0 // indirect\n\tgithub.com/go-ini/ini v1.67.0 // indirect\n\tgithub.com/golang/snappy v0.0.3 // indirect\n\tgithub.com/hashicorp/go-uuid v1.0.3 // indirect\n\tgithub.com/inconshreveable/mousetrap v1.1.0 // indirect\n\tgithub.com/jcmturner/aescts/v2 v2.0.0 // indirect\n\tgithub.com/jcmturner/dnsutils/v2 v2.0.0 // indirect\n\tgithub.com/jcmturner/gofork v1.7.6 // indirect\n\tgithub.com/jcmturner/gokrb5/v8 v8.4.4 // indirect\n\tgithub.com/jcmturner/rpc/v2 v2.0.3 // indirect\n\tgithub.com/klauspost/compress v1.18.4 // indirect\n\tgithub.com/klauspost/cpuid/v2 v2.2.11 // indirect\n\tgithub.com/klauspost/crc32 v1.3.0 // indirect\n\tgithub.com/mattn/go-colorable v0.1.13 // indirect\n\tgithub.com/mattn/go-isatty v0.0.20 // indirect\n\tgithub.com/minio/crc64nvme v1.1.1 // indirect\n\tgithub.com/minio/md5-simd v1.1.2 // indirect\n\tgithub.com/ncruces/go-strftime v1.0.0 // indirect\n\tgithub.com/philhofer/fwd v1.2.0 // indirect\n\tgithub.com/pierrec/lz4/v4 v4.1.25 // 
indirect\n\tgithub.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 // indirect\n\tgithub.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect\n\tgithub.com/rogpeppe/go-internal v1.14.1 // indirect\n\tgithub.com/rs/xid v1.6.0 // indirect\n\tgithub.com/spf13/pflag v1.0.9 // indirect\n\tgithub.com/tinylib/msgp v1.6.1 // indirect\n\tgithub.com/xdg-go/pbkdf2 v1.0.0 // indirect\n\tgithub.com/xdg-go/stringprep v1.0.4 // indirect\n\tgo.yaml.in/yaml/v3 v3.0.4 // indirect\n\tgolang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect\n\tgolang.org/x/net v0.51.0 // indirect\n\tgolang.org/x/sys v0.41.0 // indirect\n\tgolang.org/x/text v0.34.0 // indirect\n\tgolang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect\n\tmodernc.org/libc v1.67.6 // indirect\n\tmodernc.org/mathutil v1.7.1 // indirect\n\tmodernc.org/memory v1.11.0 // indirect\n)\n"
  },
  {
    "path": "go.sum",
    "content": "cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=\ncloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=\ncloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=\ncloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=\ncloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=\ncloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=\ncloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=\ncloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=\ncloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=\ncloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=\ncloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=\ncloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=\ncloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=\ncloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=\ncloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=\ncloud.google.com/go v0.66.0/go.mod h1:dgqGAjKCDxyhGTtC9dAREQGUJpkceNm1yt590Qno0Ko=\ncloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=\ncloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=\ncloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=\ncloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=\ncloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=\ncloud.google.com/go v0.82.0/go.mod h1:vlKccHJGuFBFufnAnuB08dfEH9Y3H7dzDzRECFdC2TA=\ncloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY=\ncloud.google.com/go v0.84.0/go.mod 
h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM=\ncloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY=\ncloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ=\ncloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=\ncloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4=\ncloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc=\ncloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA=\ncloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U=\ncloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A=\ncloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=\ncloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=\ncloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=\ncloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=\ncloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=\ncloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=\ncloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow=\ncloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw=\ncloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM=\ncloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M=\ncloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=\ncloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=\ncloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY=\ncloud.google.com/go/iam v0.1.0/go.mod 
h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c=\ncloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw=\ncloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY=\ncloud.google.com/go/kms v1.1.0/go.mod h1:WdbppnCDMDpOvoYBMn1+gNmOeEoZYqAv+HeuKARGCXI=\ncloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA=\ncloud.google.com/go/monitoring v1.1.0/go.mod h1:L81pzz7HKn14QCMaCs6NTQkdBnE87TElyanS95vIcl4=\ncloud.google.com/go/monitoring v1.4.0/go.mod h1:y6xnxfwI3hTFWOdkOaD7nfJVlwuC3/mS/5kvtT131p4=\ncloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=\ncloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=\ncloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=\ncloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=\ncloud.google.com/go/pubsub v1.19.0/go.mod h1:/O9kmSe9bb9KRnIAWkzmqhPjHo6LtzGOBYd/kr06XSs=\ncloud.google.com/go/secretmanager v1.3.0/go.mod h1:+oLTkouyiYiabAQNugCeTS3PAArGiMJuBqvJnJsyH+U=\ncloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=\ncloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=\ncloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=\ncloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=\ncloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=\ncloud.google.com/go/storage v1.12.0/go.mod h1:fFLk2dp2oAhDz8QFKwqrjdJvxSp/W2g7nillojlL5Ho=\ncloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA=\ncloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A=\ncloud.google.com/go/trace v1.2.0/go.mod h1:Wc8y/uYyOhPy12KEnXG9XGrvfMz5F5SrYecQlbW1rwM=\ncontrib.go.opencensus.io/exporter/aws 
v0.0.0-20200617204711-c478e41e60e9/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA=\ncontrib.go.opencensus.io/exporter/stackdriver v0.13.10/go.mod h1:I5htMbyta491eUxufwwZPQdcKvvgzMB4O9ni41YnIM8=\ncontrib.go.opencensus.io/integrations/ocsql v0.1.7/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE=\ndmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=\nfilippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=\nfilippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=\ngithub.com/Azure/azure-amqp-common-go/v3 v3.2.1/go.mod h1:O6X1iYHP7s2x7NjUKsXVhkwWrQhxrd+d8/3rRadj4CI=\ngithub.com/Azure/azure-amqp-common-go/v3 v3.2.2/go.mod h1:O6X1iYHP7s2x7NjUKsXVhkwWrQhxrd+d8/3rRadj4CI=\ngithub.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k=\ngithub.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=\ngithub.com/Azure/azure-sdk-for-go v59.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=\ngithub.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw=\ngithub.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U=\ngithub.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=\ngithub.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0=\ngithub.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM=\ngithub.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0/go.mod h1:OQeznEEkTZ9OrhHJoDD8ZDq51FHgXjqtP9z6bEwBq9U=\ngithub.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8=\ngithub.com/Azure/azure-sdk-for-go/sdk/internal v1.0.0/go.mod 
h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w=\ngithub.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM=\ngithub.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.0.0/go.mod h1:ceIuwmxDWptoW3eCqSXlnPsZFKh4X+R38dWPv7GS9Vs=\ngithub.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0/go.mod h1:s1tW/At+xHqjNFvWU4G0c0Qv33KOhvbGNj0RCTQDV8s=\ngithub.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.2.0/go.mod h1:c+Lifp3EDEamAkPVzMooRNOK6CZjNSdEnf1A7jsI9u4=\ngithub.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0/go.mod h1:7QJP7dr2wznCMeqIrhMgWGf7XpAQnVrJqDm9nvV3Cu4=\ngithub.com/Azure/azure-service-bus-go v0.11.5/go.mod h1:MI6ge2CuQWBVq+ly456MY7XqNLJip5LO1iSFodbNLbU=\ngithub.com/Azure/azure-storage-blob-go v0.14.0/go.mod h1:SMqIBi+SuiQH32bvyjngEewEeXoPfKMgWlBDaYf6fck=\ngithub.com/Azure/go-amqp v0.16.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg=\ngithub.com/Azure/go-amqp v0.16.4/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg=\ngithub.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=\ngithub.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA=\ngithub.com/Azure/go-autorest/autorest v0.11.19/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA=\ngithub.com/Azure/go-autorest/autorest v0.11.22/go.mod h1:BAWYUWGPEtKPzjVkp0Q6an0MJcJDsoh5Z1BFAEFs4Xs=\ngithub.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A=\ngithub.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=\ngithub.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=\ngithub.com/Azure/go-autorest/autorest/adal v0.9.17/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ=\ngithub.com/Azure/go-autorest/autorest/azure/auth v0.5.9/go.mod 
h1:hg3/1yw0Bq87O3KvvnJoAh34/0zbP7SFizX/qN5JvjU=\ngithub.com/Azure/go-autorest/autorest/azure/cli v0.4.2/go.mod h1:7qkJkT+j6b+hIpzMOwPChJhTqS8VbsqqgULzMNRugoM=\ngithub.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74=\ngithub.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=\ngithub.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE=\ngithub.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E=\ngithub.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=\ngithub.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=\ngithub.com/AzureAD/microsoft-authentication-library-for-go v0.4.0/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4=\ngithub.com/AzureAD/microsoft-authentication-library-for-go v1.0.0/go.mod h1:kgDmCTgBzIEPFElEF+FK0SdjAor06dRq2Go927dnQ6o=\ngithub.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=\ngithub.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=\ngithub.com/GoogleCloudPlatform/cloudsql-proxy v1.29.0/go.mod h1:spvB9eLJH9dutlbPSRmHvSXXHOwGRyeXh1jVdquA2G8=\ngithub.com/IBM/sarama v1.47.0 h1:GcQFEd12+KzfPYeLgN69Fh7vLCtYRhVIx0rO4TZO318=\ngithub.com/IBM/sarama v1.47.0/go.mod h1:7gLLIU97nznOmA6TX++Qds+DRxH89P2XICY2KAQUzAY=\ngithub.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=\ngithub.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=\ngithub.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=\ngithub.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516 h1:byKBBF2CKWBjjA4J1ZL2JXttJULvWSl50LegTyRZ728=\ngithub.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516/go.mod 
h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0=\ngithub.com/apache/thrift v0.0.0-20181112125854-24918abba929/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=\ngithub.com/apache/thrift v0.14.2 h1:hY4rAyg7Eqbb27GB6gkhUKrRAuc8xRjlNtJq+LseKeY=\ngithub.com/apache/thrift v0.14.2/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=\ngithub.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=\ngithub.com/aws/aws-sdk-go v1.30.19/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=\ngithub.com/aws/aws-sdk-go v1.37.0/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=\ngithub.com/aws/aws-sdk-go v1.43.31/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=\ngithub.com/aws/aws-sdk-go-v2 v1.16.2/go.mod h1:ytwTPBG6fXTZLxxeeCCWj2/EMYp/xDUgX+OET6TLNNU=\ngithub.com/aws/aws-sdk-go-v2 v1.23.0/go.mod h1:i1XDttT4rnf6vxc9AuskLc6s7XBee8rlLilKlc03uAA=\ngithub.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.1/go.mod h1:n8Bs1ElDD2wJ9kCRTczA83gYbBmjSwZp3umc6zF4EeM=\ngithub.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.5.1/go.mod h1:t8PYl/6LzdAqsU4/9tz28V/kU+asFePvpOMkdul0gEQ=\ngithub.com/aws/aws-sdk-go-v2/config v1.15.3/go.mod h1:9YL3v07Xc/ohTsxFXzan9ZpFpdTOFl4X65BAKYaz8jg=\ngithub.com/aws/aws-sdk-go-v2/config v1.25.3/go.mod h1:tAByZy03nH5jcq0vZmkcVoo6tRzRHEwSFx3QW4NmDw8=\ngithub.com/aws/aws-sdk-go-v2/credentials v1.11.2/go.mod h1:j8YsY9TXTm31k4eFhspiQicfXPLZ0gYXA50i4gxPE8g=\ngithub.com/aws/aws-sdk-go-v2/credentials v1.16.2/go.mod h1:sDdvGhXrSVT5yzBDR7qXz+rhbpiMpUYfF3vJ01QSdrc=\ngithub.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.3/go.mod h1:uk1vhHHERfSVCUnqSqz8O48LBYDSC+k6brng09jcMOk=\ngithub.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.14.4/go.mod h1:t4i+yGHMCcUNIX1x7YVYa6bH/Do7civ5I6cG/6PMfyA=\ngithub.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.3/go.mod h1:0dHuD2HZZSiwfJSy1FO5bX1hQ1TxVV1QXXjpn3XUE44=\ngithub.com/aws/aws-sdk-go-v2/feature/s3/manager v1.14.0/go.mod 
h1:UcgIwJ9KHquYxs6Q5skC9qXjhYMK+JASDYcXQ4X7JZE=\ngithub.com/aws/aws-sdk-go-v2/internal/configsources v1.1.9/go.mod h1:AnVH5pvai0pAF4lXRq0bmhbes1u9R8wTE+g+183bZNM=\ngithub.com/aws/aws-sdk-go-v2/internal/configsources v1.2.3/go.mod h1:7sGSz1JCKHWWBHq98m6sMtWQikmYPpxjqOydDemiVoM=\ngithub.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.3/go.mod h1:ssOhaLpRlh88H3UmEcsBoVKq309quMvm3Ds8e9d4eJM=\ngithub.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.5.3/go.mod h1:ify42Rb7nKeDDPkFjKn7q1bPscVPu/+gmHH8d2c+anU=\ngithub.com/aws/aws-sdk-go-v2/internal/ini v1.3.10/go.mod h1:8DcYQcz0+ZJaSxANlHIsbbi6S+zMwjwdDqwW3r9AzaE=\ngithub.com/aws/aws-sdk-go-v2/internal/ini v1.7.1/go.mod h1:6fQQgfuGmw8Al/3M2IgIllycxV7ZW7WCdVSqfBeUiCY=\ngithub.com/aws/aws-sdk-go-v2/internal/v4a v1.2.3/go.mod h1:5yzAuE9i2RkVAttBl8yxZgQr5OCq4D5yDnG7j9x2L0U=\ngithub.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.1/go.mod h1:GeUru+8VzrTXV/83XyMJ80KpH8xO89VPoUileyNQ+tc=\ngithub.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.10.1/go.mod h1:l9ymW25HOqymeU2m1gbUQ3rUIsTwKs8gYHXkqDQUhiI=\ngithub.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.3/go.mod h1:Seb8KNmD6kVTjwRjVEgOT5hPin6sq+v4C2ycJQDwuH8=\ngithub.com/aws/aws-sdk-go-v2/service/internal/checksum v1.2.3/go.mod h1:R+/S1O4TYpcktbVwddeOYg+uwUfLhADP2S/x4QwsCTM=\ngithub.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.3/go.mod h1:wlY6SVjuwvh3TVRpTqdy4I1JpBFLX4UGeKZdWntaocw=\ngithub.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.10.3/go.mod h1:Owv1I59vaghv1Ax8zz8ELY8DN7/Y0rGS+WWAmjgi950=\ngithub.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.3/go.mod h1:Bm/v2IaN6rZ+Op7zX+bOUMdL4fsrYZiD0dsjLhNKwZc=\ngithub.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.16.3/go.mod h1:KZgs2ny8HsxRIRbDwgvJcHHBZPOzQr/+NtGwnP+w2ec=\ngithub.com/aws/aws-sdk-go-v2/service/kms v1.16.3/go.mod h1:QuiHPBqlOFCi4LqdSskYYAWpQlx3PKmohy+rE2F+o5g=\ngithub.com/aws/aws-sdk-go-v2/service/s3 v1.26.3/go.mod 
h1:g1qvDuRsJY+XghsV6zg00Z4KJ7DtFFCx8fJD2a491Ak=\ngithub.com/aws/aws-sdk-go-v2/service/s3 v1.43.0/go.mod h1:NXRKkiRF+erX2hnybnVU660cYT5/KChRD4iUgJ97cI8=\ngithub.com/aws/aws-sdk-go-v2/service/secretsmanager v1.15.4/go.mod h1:PJc8s+lxyU8rrre0/4a0pn2wgwiDvOEzoOjcJUBr67o=\ngithub.com/aws/aws-sdk-go-v2/service/sns v1.17.4/go.mod h1:kElt+uCcXxcqFyc+bQqZPFD9DME/eC6oHBXvFzQ9Bcw=\ngithub.com/aws/aws-sdk-go-v2/service/sqs v1.18.3/go.mod h1:skmQo0UPvsjsuYYSYMVmrPc1HWCbHUJyrCEp+ZaLzqM=\ngithub.com/aws/aws-sdk-go-v2/service/ssm v1.24.1/go.mod h1:NR/xoKjdbRJ+qx0pMR4mI+N/H1I1ynHwXnO6FowXJc0=\ngithub.com/aws/aws-sdk-go-v2/service/sso v1.11.3/go.mod h1:7UQ/e69kU7LDPtY40OyoHYgRmgfGM4mgsLYtcObdveU=\ngithub.com/aws/aws-sdk-go-v2/service/sso v1.17.2/go.mod h1:/pE21vno3q1h4bbhUOEi+6Zu/aT26UK2WKkDXd+TssQ=\ngithub.com/aws/aws-sdk-go-v2/service/ssooidc v1.20.0/go.mod h1:dWqm5G767qwKPuayKfzm4rjzFmVjiBFbOJrpSPnAMDs=\ngithub.com/aws/aws-sdk-go-v2/service/sts v1.16.3/go.mod h1:bfBj0iVmsUyUg4weDB4NxktD9rDGeKSVWnjTnwbx9b8=\ngithub.com/aws/aws-sdk-go-v2/service/sts v1.25.3/go.mod h1:4EqRHDCKP78hq3zOnmFXu5k0j4bXbRFfCh/zQ6KnEfQ=\ngithub.com/aws/smithy-go v1.11.2/go.mod h1:3xHYmszWVx2c0kIwQeEVf9uSm4fYZt67FBJnwub1bgM=\ngithub.com/aws/smithy-go v1.17.0/go.mod h1:NukqUGpCZIILqqiV0NIjeFh24kd/FAa4beRb6nbIUPE=\ngithub.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=\ngithub.com/bobg/gcsobj v0.1.2/go.mod h1:vS49EQ1A1Ib8FgrL58C8xXYZyOCR2TgzAdopy6/ipa8=\ngithub.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=\ngithub.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=\ngithub.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=\ngithub.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=\ngithub.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=\ngithub.com/chzyer/readline 
v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=\ngithub.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=\ngithub.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=\ngithub.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=\ngithub.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=\ngithub.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=\ngithub.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=\ngithub.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=\ngithub.com/colinmarc/hdfs/v2 v2.1.1/go.mod h1:M3x+k8UKKmxtFu++uAZ0OtDU8jR3jnaZIAc6yK4Ue0c=\ngithub.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=\ngithub.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=\ngithub.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=\ngithub.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=\ngithub.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=\ngithub.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=\ngithub.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=\ngithub.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=\ngithub.com/denisenkom/go-mssqldb v0.12.0/go.mod h1:iiK0YP1ZeepvmBQk/QpLEhhTNJgfzrpArPY/aFvc9yU=\ngithub.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=\ngithub.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=\ngithub.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=\ngithub.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko=\ngithub.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=\ngithub.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=\ngithub.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=\ngithub.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=\ngithub.com/eapache/go-resiliency v1.7.0 h1:n3NRTnBn5N0Cbi/IeOHuQn9s2UwVUH7Ga0ZWcP+9JTA=\ngithub.com/eapache/go-resiliency v1.7.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho=\ngithub.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=\ngithub.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=\ngithub.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=\ngithub.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=\ngithub.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=\ngithub.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=\ngithub.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod 
h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=\ngithub.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=\ngithub.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=\ngithub.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=\ngithub.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=\ngithub.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=\ngithub.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=\ngithub.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=\ngithub.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=\ngithub.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=\ngithub.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=\ngithub.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=\ngithub.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=\ngithub.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M=\ngithub.com/gin-gonic/gin v1.7.3/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY=\ngithub.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug=\ngithub.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0=\ngithub.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=\ngithub.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=\ngithub.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=\ngithub.com/go-ini/ini 
v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=\ngithub.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=\ngithub.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=\ngithub.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=\ngithub.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=\ngithub.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=\ngithub.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=\ngithub.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=\ngithub.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI=\ngithub.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=\ngithub.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=\ngithub.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=\ngithub.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=\ngithub.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=\ngithub.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=\ngithub.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=\ngithub.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=\ngithub.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=\ngithub.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=\ngithub.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=\ngithub.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=\ngithub.com/golang-jwt/jwt/v4 v4.0.0/go.mod 
h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=\ngithub.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=\ngithub.com/golang-jwt/jwt/v4 v4.4.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=\ngithub.com/golang-jwt/jwt/v4 v4.4.3/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=\ngithub.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=\ngithub.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=\ngithub.com/golang-sql/sqlexp v0.0.0-20170517235910-f1bb20e5a188/go.mod h1:vXjM/+wXQnTPR4KqTKDgJukSZ6amVRtWMPEjE6sQoK8=\ngithub.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=\ngithub.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=\ngithub.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=\ngithub.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=\ngithub.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=\ngithub.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=\ngithub.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=\ngithub.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=\ngithub.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=\ngithub.com/golang/mock v1.6.0/go.mod 
h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=\ngithub.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=\ngithub.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=\ngithub.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=\ngithub.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=\ngithub.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=\ngithub.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=\ngithub.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=\ngithub.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=\ngithub.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=\ngithub.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=\ngithub.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=\ngithub.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=\ngithub.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=\ngithub.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=\ngithub.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=\ngithub.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA=\ngithub.com/golang/snappy v0.0.3/go.mod 
h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=\ngithub.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=\ngithub.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=\ngithub.com/google/flatbuffers v1.11.0 h1:O7CEyB8Cb3/DmtxODGtLHcEvpr81Jm5qLg/hsHnxA2A=\ngithub.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=\ngithub.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=\ngithub.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=\ngithub.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=\ngithub.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=\ngithub.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=\ngithub.com/google/go-replayers/grpcreplay v1.1.0/go.mod h1:qzAvJ8/wi57zq7gWqaE6AwLM6miiXUQwP1S+I9icmhk=\ngithub.com/google/go-replayers/httpreplay v1.1.1/go.mod h1:gN9GeLIs7l6NUoVaSSnv2RiqK1NiwAmD0MrKeC9IIks=\ngithub.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=\ngithub.com/google/martian 
v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=\ngithub.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=\ngithub.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=\ngithub.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=\ngithub.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=\ngithub.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=\ngithub.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=\ngithub.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=\ngithub.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200905233945-acf8798be1f7/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof 
v0.0.0-20210506205249-923b5ab0fc1a/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=\ngithub.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=\ngithub.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=\ngithub.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=\ngithub.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=\ngithub.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU=\ngithub.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=\ngithub.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=\ngithub.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0=\ngithub.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM=\ngithub.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM=\ngithub.com/gorilla/securecookie v1.1.1/go.mod 
h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=\ngithub.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=\ngithub.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=\ngithub.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=\ngithub.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=\ngithub.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=\ngithub.com/hanwen/go-fuse v1.0.0/go.mod h1:unqXarDXqzAk0rt98O2tVndEPIpUgLD9+rwFisZH3Ok=\ngithub.com/hanwen/go-fuse/v2 v2.1.0/go.mod h1:oRyA5eK+pvJyv5otpO/DgccS8y/RvYMaO00GgRLGryc=\ngithub.com/hashicorp/go-uuid v0.0.0-20180228145832-27454136f036/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=\ngithub.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=\ngithub.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=\ngithub.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=\ngithub.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=\ngithub.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=\ngithub.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=\ngithub.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=\ngithub.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=\ngithub.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=\ngithub.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=\ngithub.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=\ngithub.com/inconshreveable/mousetrap v1.1.0/go.mod 
h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=\ngithub.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=\ngithub.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=\ngithub.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=\ngithub.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA=\ngithub.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE=\ngithub.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s=\ngithub.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=\ngithub.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY=\ngithub.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=\ngithub.com/jackc/pgconn v1.11.0/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=\ngithub.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=\ngithub.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=\ngithub.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c=\ngithub.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak=\ngithub.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=\ngithub.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=\ngithub.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=\ngithub.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=\ngithub.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod 
h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=\ngithub.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=\ngithub.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=\ngithub.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=\ngithub.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=\ngithub.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=\ngithub.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=\ngithub.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=\ngithub.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=\ngithub.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=\ngithub.com/jackc/pgtype v1.10.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=\ngithub.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=\ngithub.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=\ngithub.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=\ngithub.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=\ngithub.com/jackc/pgx/v4 v4.15.0/go.mod h1:D/zyOyXiaM1TmVWnOM18p0xdDtdakRBa0RsVGI3U3bw=\ngithub.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=\ngithub.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=\ngithub.com/jackc/puddle v1.1.3/go.mod 
h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=\ngithub.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=\ngithub.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=\ngithub.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=\ngithub.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=\ngithub.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=\ngithub.com/jcmturner/gofork v0.0.0-20180107083740-2aebee971930/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=\ngithub.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg=\ngithub.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo=\ngithub.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=\ngithub.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=\ngithub.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8=\ngithub.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs=\ngithub.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY=\ngithub.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=\ngithub.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=\ngithub.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=\ngithub.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=\ngithub.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=\ngithub.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=\ngithub.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=\ngithub.com/json-iterator/go v1.1.12/go.mod 
h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=\ngithub.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=\ngithub.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=\ngithub.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=\ngithub.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=\ngithub.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=\ngithub.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=\ngithub.com/klauspost/compress v1.15.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=\ngithub.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=\ngithub.com/klauspost/compress v1.18.4 h1:RPhnKRAQ4Fh8zU2FY/6ZFDwTVTxgJ/EMydqSTzE9a2c=\ngithub.com/klauspost/compress v1.18.4/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=\ngithub.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=\ngithub.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=\ngithub.com/klauspost/cpuid/v2 v2.1.0/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=\ngithub.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4OxRzU=\ngithub.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=\ngithub.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM=\ngithub.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw=\ngithub.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=\ngithub.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=\ngithub.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=\ngithub.com/kr/pretty v0.2.1/go.mod 
h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=\ngithub.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=\ngithub.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=\ngithub.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=\ngithub.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=\ngithub.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=\ngithub.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=\ngithub.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=\ngithub.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=\ngithub.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=\ngithub.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=\ngithub.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=\ngithub.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=\ngithub.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=\ngithub.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=\ngithub.com/lib/pq v1.11.2 h1:x6gxUeu39V0BHZiugWe8LXZYZ+Utk7hSJGThs8sdzfs=\ngithub.com/lib/pq v1.11.2/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA=\ngithub.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=\ngithub.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=\ngithub.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=\ngithub.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=\ngithub.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=\ngithub.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=\ngithub.com/mattn/go-isatty v0.0.7/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=\ngithub.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=\ngithub.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=\ngithub.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=\ngithub.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=\ngithub.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=\ngithub.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=\ngithub.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=\ngithub.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=\ngithub.com/minio/minio-go/v7 v7.0.34/go.mod h1:nCrRzjoSUQh8hgKKtu3Y708OLvRLtuASMg2/nvmbarw=\ngithub.com/minio/minio-go/v7 v7.0.98 h1:MeAVKjLVz+XJ28zFcuYyImNSAh8Mq725uNW4beRisi0=\ngithub.com/minio/minio-go/v7 v7.0.98/go.mod h1:cY0Y+W7yozf0mdIclrttzo1Iiu7mEf9y7nk2uXqMOvM=\ngithub.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=\ngithub.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=\ngithub.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=\ngithub.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=\ngithub.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=\ngithub.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=\ngithub.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=\ngithub.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=\ngithub.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod 
h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8=\ngithub.com/montanaflynn/stats v0.6.6/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=\ngithub.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=\ngithub.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=\ngithub.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=\ngithub.com/ncw/swift v1.0.52/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=\ngithub.com/pborman/getopt v0.0.0-20180729010549-6fdd0a2c7117/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o=\ngithub.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=\ngithub.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=\ngithub.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=\ngithub.com/pierrec/lz4/v4 v4.1.25 h1:kocOqRffaIbU5djlIBr7Wh+cx82C0vtFb0fOurZHqD0=\ngithub.com/pierrec/lz4/v4 v4.1.25/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=\ngithub.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA=\ngithub.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ=\ngithub.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=\ngithub.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=\ngithub.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=\ngithub.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=\ngithub.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 
h1:bsUq1dX0N8AOIL7EB/X911+m4EHsnWEHeJ0c+3TTBrg=\ngithub.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=\ngithub.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=\ngithub.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=\ngithub.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=\ngithub.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=\ngithub.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=\ngithub.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=\ngithub.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=\ngithub.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=\ngithub.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=\ngithub.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=\ngithub.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=\ngithub.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=\ngithub.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=\ngithub.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=\ngithub.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=\ngithub.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=\ngithub.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=\ngithub.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=\ngithub.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=\ngithub.com/spaolacci/murmur3 
v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=\ngithub.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=\ngithub.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=\ngithub.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=\ngithub.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=\ngithub.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=\ngithub.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=\ngithub.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=\ngithub.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=\ngithub.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=\ngithub.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=\ngithub.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=\ngithub.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=\ngithub.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=\ngithub.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=\ngithub.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=\ngithub.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=\ngithub.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=\ngithub.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=\ngithub.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=\ngithub.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=\ngithub.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=\ngithub.com/stretchr/testify v1.11.1/go.mod 
h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=\ngithub.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=\ngithub.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA=\ngithub.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=\ngithub.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=\ngithub.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=\ngithub.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=\ngithub.com/xdg-go/scram v1.2.0 h1:bYKF2AEwG5rqd1BumT4gAnvwU/M9nBp2pTSxeZw7Wvs=\ngithub.com/xdg-go/scram v1.2.0/go.mod h1:3dlrS0iBaWKYVt2ZfA4cj48umJZ+cAEbR6/SjLA88I8=\ngithub.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8=\ngithub.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM=\ngithub.com/xitongsys/parquet-go v1.5.1/go.mod h1:xUxwM8ELydxh4edHGegYq1pA8NnMKDx0K/GyB0o2bww=\ngithub.com/xitongsys/parquet-go v1.6.2 h1:MhCaXii4eqceKPu9BwrjLqyK10oX9WF+xGhwvwbw7xM=\ngithub.com/xitongsys/parquet-go v1.6.2/go.mod h1:IulAQyalCm0rPiZVNnCgm/PCL64X2tdSVGMQ/UeKqWA=\ngithub.com/xitongsys/parquet-go-source v0.0.0-20190524061010-2b72cbee77d5/go.mod h1:xxCx7Wpym/3QCo6JhujJX51dzSXrwmb0oH6FQb39SEA=\ngithub.com/xitongsys/parquet-go-source v0.0.0-20200817004010-026bad9b25d0/go.mod h1:HYhIKsdns7xz80OgkbgJYrtQY7FjHWHKH6cvN7+czGE=\ngithub.com/xitongsys/parquet-go-source v0.0.0-20241021075129-b732d2ac9c9b h1:zbb5qM/t3N+O33Vp5sFyG6yIcWZV1q7rfEjJM8UsRBQ=\ngithub.com/xitongsys/parquet-go-source v0.0.0-20241021075129-b732d2ac9c9b/go.mod h1:2ActxmJ4q17Cdruar9nKEkzKSOL1Ol03737Bkz10rTY=\ngithub.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.2.1/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=\ngithub.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=\ngithub.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=\ngo.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=\ngo.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=\ngo.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=\ngo.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=\ngo.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=\ngo.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=\ngo.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=\ngo.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=\ngo.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=\ngo.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=\ngo.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=\ngo.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=\ngo.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=\ngo.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=\ngo.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=\ngo.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=\ngo.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=\ngo.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=\ngo.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=\ngo.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=\ngo.uber.org/multierr v1.8.0/go.mod 
h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak=\ngo.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=\ngo.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=\ngo.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=\ngo.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM=\ngo.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw=\ngo.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=\ngo.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=\ngocloud.dev v0.26.0/go.mod h1:mkUgejbnbLotorqDyvedJO20XcZNTynmSeVSQS9btVg=\ngolang.org/x/crypto v0.0.0-20180723164146-c126467f60eb/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=\ngolang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=\ngolang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=\ngolang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=\ngolang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=\ngolang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=\ngolang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=\ngolang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod 
h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=\ngolang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=\ngolang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=\ngolang.org/x/crypto v0.0.0-20211115234514-b4de73f9ece8/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=\ngolang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=\ngolang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=\ngolang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=\ngolang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=\ngolang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=\ngolang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=\ngolang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=\ngolang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=\ngolang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=\ngolang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=\ngolang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=\ngolang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=\ngolang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=\ngolang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=\ngolang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=\ngolang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod 
h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=\ngolang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=\ngolang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=\ngolang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=\ngolang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=\ngolang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=\ngolang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=\ngolang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=\ngolang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=\ngolang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=\ngolang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=\ngolang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=\ngolang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=\ngolang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=\ngolang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=\ngolang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=\ngolang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=\ngolang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=\ngolang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=\ngolang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=\ngolang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=\ngolang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=\ngolang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=\ngolang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20201010224723-4f7140c49acb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=\ngolang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=\ngolang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20211020060615-d418f374d309/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=\ngolang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=\ngolang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=\ngolang.org/x/net v0.0.0-20220401154927-543a649e0bdd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=\ngolang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=\ngolang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=\ngolang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=\ngolang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=\ngolang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=\ngolang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=\ngolang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=\ngolang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=\ngolang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=\ngolang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 
v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=\ngolang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=\ngolang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync 
v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=\ngolang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=\ngolang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys 
v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211116061358-0a5406a5449c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220330033206-e17cdc41300f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=\ngolang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=\ngolang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=\ngolang.org/x/term 
v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=\ngolang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=\ngolang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=\ngolang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=\ngolang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=\ngolang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=\ngolang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=\ngolang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=\ngolang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=\ngolang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=\ngolang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=\ngolang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=\ngolang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=\ngolang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=\ngolang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=\ngolang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=\ngolang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20211116232009-f0f3c7e86c11/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20220224211638-0e9765cccd65/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=\ngolang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=\ngolang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=\ngolang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=\ngolang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=\ngolang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=\ngolang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200828161849-5deb26317202/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=\ngolang.org/x/tools v0.0.0-20200915173823-2db8f0ff891c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU=\ngolang.org/x/tools v0.0.0-20200918232735-d647fc253266/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU=\ngolang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=\ngolang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=\ngolang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=\ngolang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=\ngolang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=\ngolang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=\ngolang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngoogle.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=\ngoogle.golang.org/api v0.7.0/go.mod 
h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=\ngoogle.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=\ngoogle.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=\ngoogle.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=\ngoogle.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=\ngoogle.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=\ngoogle.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=\ngoogle.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=\ngoogle.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=\ngoogle.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=\ngoogle.golang.org/api v0.31.0/go.mod h1:CL+9IBCa2WWU6gRuBWaKqGWLFFwbEUXkfeMkHLQWYWo=\ngoogle.golang.org/api v0.32.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=\ngoogle.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=\ngoogle.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=\ngoogle.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=\ngoogle.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=\ngoogle.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=\ngoogle.golang.org/api v0.46.0/go.mod h1:ceL4oozhkAiTID8XMmJBsIxID/9wMXJVVFXPg4ylg3I=\ngoogle.golang.org/api v0.47.0/go.mod 
h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo=\ngoogle.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4=\ngoogle.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw=\ngoogle.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU=\ngoogle.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k=\ngoogle.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=\ngoogle.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=\ngoogle.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI=\ngoogle.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E=\ngoogle.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU=\ngoogle.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I=\ngoogle.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo=\ngoogle.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM=\ngoogle.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M=\ngoogle.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g=\ngoogle.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7TH8=\ngoogle.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80=\ngoogle.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA=\ngoogle.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8=\ngoogle.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs=\ngoogle.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=\ngoogle.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=\ngoogle.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=\ngoogle.golang.org/appengine v1.6.1/go.mod 
h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=\ngoogle.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=\ngoogle.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=\ngoogle.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=\ngoogle.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=\ngoogle.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=\ngoogle.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=\ngoogle.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=\ngoogle.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=\ngoogle.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=\ngoogle.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=\ngoogle.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=\ngoogle.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto 
v0.0.0-20200831141814-d751682dd103/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200914193844-75d14daec038/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200921151605-7abf4a1a14d5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=\ngoogle.golang.org/genproto v0.0.0-20210429181445-86c259c2b4ab/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=\ngoogle.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=\ngoogle.golang.org/genproto v0.0.0-20210517163617-5e0236093d7a/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=\ngoogle.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod 
h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=\ngoogle.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=\ngoogle.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=\ngoogle.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=\ngoogle.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=\ngoogle.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=\ngoogle.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=\ngoogle.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=\ngoogle.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w=\ngoogle.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto 
v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211018162055-cf77aa76bad2/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220204002441-d6cc3cc0770e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=\ngoogle.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=\ngoogle.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod 
h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=\ngoogle.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=\ngoogle.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=\ngoogle.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=\ngoogle.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E=\ngoogle.golang.org/genproto v0.0.0-20220401170504-314d38edb7de/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=\ngoogle.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=\ngoogle.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=\ngoogle.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=\ngoogle.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=\ngoogle.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=\ngoogle.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=\ngoogle.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=\ngoogle.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=\ngoogle.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=\ngoogle.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=\ngoogle.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=\ngoogle.golang.org/grpc v1.33.2/go.mod 
h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=\ngoogle.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=\ngoogle.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=\ngoogle.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=\ngoogle.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=\ngoogle.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=\ngoogle.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=\ngoogle.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=\ngoogle.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=\ngoogle.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=\ngoogle.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=\ngoogle.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=\ngoogle.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=\ngoogle.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=\ngoogle.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=\ngoogle.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=\ngoogle.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=\ngoogle.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=\ngoogle.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=\ngoogle.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=\ngoogle.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=\ngoogle.golang.org/protobuf v1.23.0/go.mod 
h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=\ngoogle.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=\ngoogle.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=\ngoogle.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=\ngoogle.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=\ngoogle.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=\ngoogle.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=\ngoogle.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=\ngopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=\ngopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=\ngopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=\ngopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=\ngopkg.in/ini.v1 v1.66.6/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=\ngopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo=\ngopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q=\ngopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4=\ngopkg.in/jcmturner/gokrb5.v7 v7.3.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM=\ngopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8=\ngopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 
v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=\ngopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\ngopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\ngopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=\ngopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\nhonnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=\nhonnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=\nhonnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=\nmodernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=\nmodernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=\nmodernc.org/ccgo/v4 v4.30.1 h1:4r4U1J6Fhj98NKfSjnPUN7Ze2c6MnAdL0hWw6+LrJpc=\nmodernc.org/ccgo/v4 v4.30.1/go.mod h1:bIOeI1JL54Utlxn+LwrFyjCx2n2RDiYEaJVSrgdrRfM=\nmodernc.org/fileutil v1.3.40 h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA=\nmodernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=\nmodernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=\nmodernc.org/gc/v2 v2.6.5/go.mod 
h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=\nmodernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE=\nmodernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=\nmodernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=\nmodernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=\nmodernc.org/libc v1.67.6 h1:eVOQvpModVLKOdT+LvBPjdQqfrZq+pC39BygcT+E7OI=\nmodernc.org/libc v1.67.6/go.mod h1:JAhxUVlolfYDErnwiqaLvUqc8nfb2r6S6slAgZOnaiE=\nmodernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=\nmodernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=\nmodernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=\nmodernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=\nmodernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=\nmodernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=\nmodernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=\nmodernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=\nmodernc.org/sqlite v1.44.3 h1:+39JvV/HWMcYslAwRxHb8067w+2zowvFOUrOWIy9PjY=\nmodernc.org/sqlite v1.44.3/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA=\nmodernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=\nmodernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=\nmodernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=\nmodernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=\nnhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=\nrsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=\nrsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=\nrsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=\n"
  },
  {
    "path": "internal/alerts/dispatcher.go",
    "content": "package alerts\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"crypto/tls\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"log/slog\"\n\t\"math\"\n\t\"net/http\"\n\t\"net/smtp\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\nconst (\n\tChannelTypeSMTP   = \"smtp\"\n\tChannelTypeResend = \"resend\"\n\tChannelTypeBrevo  = \"brevo\"\n)\n\nconst (\n\tEventTypePolicyViolation = \"policy.violation\"\n\tEventTypeScheduleFailed  = \"schedule.failed\"\n\tEventTypeScheduleSlow    = \"schedule.slow\"\n)\n\nconst (\n\tSeverityInfo     = \"info\"\n\tSeverityWarn     = \"warn\"\n\tSeverityError    = \"error\"\n\tSeverityCritical = \"critical\"\n)\n\nconst (\n\tdispatchTickInterval = 8 * time.Second\n\tmaxNewEventsPerTick  = 100\n\tmaxJobsPerTick       = 30\n)\n\ntype Dispatcher struct {\n\tdb     *database.DB\n\tcfg    *config.Config\n\tstopCh chan struct{}\n\thttp   *http.Client\n}\n\nfunc NewDispatcher(db *database.DB, cfg *config.Config) *Dispatcher {\n\treturn &Dispatcher{\n\t\tdb:     db,\n\t\tcfg:    cfg,\n\t\tstopCh: make(chan struct{}),\n\t\thttp: &http.Client{\n\t\t\tTimeout: 15 * time.Second,\n\t\t},\n\t}\n}\n\nfunc (d *Dispatcher) Start() {\n\tgo func() {\n\t\tslog.Info(\"Alert dispatcher started\", \"interval\", dispatchTickInterval)\n\t\tticker := time.NewTicker(dispatchTickInterval)\n\t\tdefer ticker.Stop()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-d.stopCh:\n\t\t\t\tslog.Info(\"Alert dispatcher stopped\")\n\t\t\t\treturn\n\t\t\tcase <-ticker.C:\n\t\t\t\td.tick()\n\t\t\t}\n\t\t}\n\t}()\n}\n\nfunc (d *Dispatcher) Stop() {\n\tclose(d.stopCh)\n}\n\nfunc (d *Dispatcher) tick() {\n\tif !d.cfg.IsPro() {\n\t\treturn\n\t}\n\td.materializeEventJobs()\n\td.processDueJobs()\n\td.processDueDigests()\n}\n\nfunc (d *Dispatcher) materializeEventJobs() {\n\tevents, err := 
d.db.ListNewAlertEvents(maxNewEventsPerTick)\n\tif err != nil {\n\t\tslog.Error(\"Alert dispatcher failed to list new events\", \"error\", err)\n\t\treturn\n\t}\n\tif len(events) == 0 {\n\t\treturn\n\t}\n\n\trules, err := d.db.ListEnabledAlertRules()\n\tif err != nil {\n\t\tslog.Error(\"Alert dispatcher failed to list enabled rules\", \"error\", err)\n\t\treturn\n\t}\n\n\troutesByRule := make(map[string][]database.AlertRuleRouteView)\n\tnow := time.Now().UTC()\n\tfor _, event := range events {\n\t\tfor _, rule := range rules {\n\t\t\tif !ruleMatchesEvent(rule, event) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\troutes, ok := routesByRule[rule.ID]\n\t\t\tif !ok {\n\t\t\t\troutes, err = d.db.ListActiveAlertRuleRoutes(rule.ID)\n\t\t\t\tif err != nil {\n\t\t\t\t\tslog.Error(\"Alert dispatcher failed to list active routes\", \"rule\", rule.ID, \"error\", err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\troutesByRule[rule.ID] = routes\n\t\t\t}\n\t\t\tfor _, route := range routes {\n\t\t\t\tif len(route.Recipients) == 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tdeliveryMode := strings.ToLower(strings.TrimSpace(route.DeliveryMode))\n\t\t\t\tif deliveryMode == \"digest\" {\n\t\t\t\t\tif err := d.db.UpsertAlertRouteDigest(rule, route, event, now); err != nil {\n\t\t\t\t\t\tslog.Error(\"Alert dispatcher failed to upsert digest batch\", \"event\", event.ID, \"rule\", rule.ID, \"route\", route.ID, \"error\", err)\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif event.Fingerprint != nil && strings.TrimSpace(*event.Fingerprint) != \"\" && rule.CooldownSeconds > 0 {\n\t\t\t\t\tsince := now.Add(-time.Duration(rule.CooldownSeconds) * time.Second)\n\t\t\t\t\texists, err := d.db.HasRecentAlertDispatch(route.ID, *event.Fingerprint, since)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tslog.Warn(\"Alert dispatcher dedupe check failed\", \"route\", route.ID, \"error\", err)\n\t\t\t\t\t} else if exists {\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif _, err := 
d.db.CreateAlertDispatchJob(event.ID, rule.ID, route.ID, route.ChannelID, rule.MaxAttempts, now); err != nil {\n\t\t\t\t\tslog.Error(\"Alert dispatcher failed to create dispatch job\", \"event\", event.ID, \"rule\", rule.ID, \"route\", route.ID, \"error\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif err := d.db.MarkAlertEventProcessed(event.ID); err != nil {\n\t\t\tslog.Warn(\"Alert dispatcher failed to mark event processed\", \"event\", event.ID, \"error\", err)\n\t\t}\n\t}\n}\n\nfunc (d *Dispatcher) processDueJobs() {\n\tjobs, err := d.db.ListDueAlertDispatchJobs(maxJobsPerTick)\n\tif err != nil {\n\t\tslog.Error(\"Alert dispatcher failed to list due jobs\", \"error\", err)\n\t\treturn\n\t}\n\tif len(jobs) == 0 {\n\t\treturn\n\t}\n\n\tfor _, job := range jobs {\n\t\tif err := d.db.MarkAlertDispatchJobSending(job.ID); err != nil {\n\t\t\tslog.Warn(\"Alert dispatcher failed to mark job sending\", \"job\", job.ID, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\trecipients := parseRecipients(job.RouteRecipientsJSON)\n\t\tif len(recipients) == 0 {\n\t\t\t_ = d.db.MarkAlertDispatchJobFailed(job.ID, \"route has no recipients\")\n\t\t\tcontinue\n\t\t}\n\n\t\tdecrypted, err := crypto.Decrypt(job.ChannelConfigEncrypted, d.cfg.AppSecretKey)\n\t\tif err != nil {\n\t\t\t_ = d.db.MarkAlertDispatchJobFailed(job.ID, \"decrypt channel config: \"+err.Error())\n\t\t\tcontinue\n\t\t}\n\n\t\tvar channelConfig map[string]interface{}\n\t\tif err := json.Unmarshal([]byte(decrypted), &channelConfig); err != nil {\n\t\t\t_ = d.db.MarkAlertDispatchJobFailed(job.ID, \"parse channel config: \"+err.Error())\n\t\t\tcontinue\n\t\t}\n\n\t\tsubject := renderTemplate(coalesce(job.RuleSubjectTemplate,\n\t\t\tfmt.Sprintf(\"[CH-UI][%s][%s] %s\", strings.ToUpper(job.EventSeverity), job.EventType, job.EventTitle)),\n\t\t\tjob,\n\t\t)\n\t\tbody := renderTemplate(coalesce(job.RuleBodyTemplate, defaultBody(job)), job)\n\n\t\tproviderMessageID, err := d.sendByChannelType(context.Background(), job.ChannelType, 
channelConfig, recipients, subject, body)\n\n\t\tif err != nil {\n\t\t\tnextAttempt := job.AttemptCount + 1\n\t\t\tif nextAttempt >= job.MaxAttempts {\n\t\t\t\tfailureMessage := err.Error()\n\t\t\t\tif escalationNote := d.tryEscalationForDispatchJob(job, subject, body, failureMessage, nextAttempt); escalationNote != \"\" {\n\t\t\t\t\tfailureMessage = failureMessage + \" | \" + escalationNote\n\t\t\t\t}\n\t\t\t\t_ = d.db.MarkAlertDispatchJobFailed(job.ID, failureMessage)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tbackoff := retryBackoff(nextAttempt)\n\t\t\t_ = d.db.MarkAlertDispatchJobRetry(job.ID, time.Now().UTC().Add(backoff), err.Error())\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := d.db.MarkAlertDispatchJobSent(job.ID, providerMessageID); err != nil {\n\t\t\tslog.Warn(\"Alert dispatcher failed to mark job sent\", \"job\", job.ID, \"error\", err)\n\t\t}\n\t}\n}\n\nfunc (d *Dispatcher) processDueDigests() {\n\tdigests, err := d.db.ListDueAlertRouteDigests(maxJobsPerTick)\n\tif err != nil {\n\t\tslog.Error(\"Alert dispatcher failed to list due digests\", \"error\", err)\n\t\treturn\n\t}\n\tif len(digests) == 0 {\n\t\treturn\n\t}\n\n\tfor _, digest := range digests {\n\t\tif err := d.db.MarkAlertRouteDigestSending(digest.ID); err != nil {\n\t\t\tslog.Warn(\"Alert dispatcher failed to mark digest sending\", \"digest\", digest.ID, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\trecipients := parseRecipients(digest.RouteRecipientsJSON)\n\t\tif len(recipients) == 0 {\n\t\t\t_ = d.db.MarkAlertRouteDigestFailed(digest.ID, \"digest route has no recipients\")\n\t\t\tcontinue\n\t\t}\n\n\t\tdecrypted, err := crypto.Decrypt(digest.ChannelConfigEncrypted, d.cfg.AppSecretKey)\n\t\tif err != nil {\n\t\t\t_ = d.db.MarkAlertRouteDigestFailed(digest.ID, \"decrypt channel config: \"+err.Error())\n\t\t\tcontinue\n\t\t}\n\t\tvar channelConfig map[string]interface{}\n\t\tif err := json.Unmarshal([]byte(decrypted), &channelConfig); err != nil {\n\t\t\t_ = d.db.MarkAlertRouteDigestFailed(digest.ID, 
\"parse channel config: \"+err.Error())\n\t\t\tcontinue\n\t\t}\n\n\t\tsubject := fmt.Sprintf(\"[CH-UI Digest][%s][%s] %d events\", strings.ToUpper(digest.Severity), digest.EventType, digest.EventCount)\n\t\tbody := renderDigestBody(digest)\n\n\t\t_, err = d.sendByChannelType(context.Background(), digest.ChannelType, channelConfig, recipients, subject, body)\n\t\tif err != nil {\n\t\t\tnextAttempt := digest.AttemptCount + 1\n\t\t\tif nextAttempt >= digest.MaxAttempts {\n\t\t\t\tfailureMessage := err.Error()\n\t\t\t\tif escalationNote := d.tryEscalationForDigest(digest, subject, body, failureMessage, nextAttempt); escalationNote != \"\" {\n\t\t\t\t\tfailureMessage = failureMessage + \" | \" + escalationNote\n\t\t\t\t}\n\t\t\t\t_ = d.db.MarkAlertRouteDigestFailed(digest.ID, failureMessage)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tbackoff := retryBackoff(nextAttempt)\n\t\t\t_ = d.db.MarkAlertRouteDigestRetry(digest.ID, time.Now().UTC().Add(backoff), err.Error())\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := d.db.MarkAlertRouteDigestSent(digest.ID); err != nil {\n\t\t\tslog.Warn(\"Alert dispatcher failed to mark digest sent\", \"digest\", digest.ID, \"error\", err)\n\t\t}\n\t}\n}\n\n// SendDirect sends a one-off notification without queueing.\nfunc SendDirect(ctx context.Context, channelType string, channelConfig map[string]interface{}, recipients []string, subject, body string) (string, error) {\n\td := &Dispatcher{\n\t\thttp: &http.Client{Timeout: 15 * time.Second},\n\t}\n\treturn d.sendByChannelType(ctx, channelType, channelConfig, recipients, subject, body)\n}\n\nfunc (d *Dispatcher) sendByChannelType(ctx context.Context, channelType string, channelConfig map[string]interface{}, recipients []string, subject, body string) (string, error) {\n\tswitch strings.ToLower(channelType) {\n\tcase ChannelTypeSMTP:\n\t\treturn d.sendSMTP(ctx, channelConfig, recipients, subject, body)\n\tcase ChannelTypeResend:\n\t\treturn d.sendResend(ctx, channelConfig, recipients, subject, body)\n\tcase 
ChannelTypeBrevo:\n\t\treturn d.sendBrevo(ctx, channelConfig, recipients, subject, body)\n\tdefault:\n\t\treturn \"\", fmt.Errorf(\"unsupported channel type: %s\", channelType)\n\t}\n}\n\nfunc ruleMatchesEvent(rule database.AlertRule, event database.AlertEvent) bool {\n\teventType := strings.ToLower(strings.TrimSpace(event.EventType))\n\truleType := strings.ToLower(strings.TrimSpace(rule.EventType))\n\tif ruleType != \"*\" && ruleType != \"any\" && ruleType != eventType {\n\t\treturn false\n\t}\n\treturn severityRank(event.Severity) >= severityRank(rule.SeverityMin)\n}\n\nfunc severityRank(s string) int {\n\tswitch strings.ToLower(strings.TrimSpace(s)) {\n\tcase SeverityInfo:\n\t\treturn 1\n\tcase SeverityWarn:\n\t\treturn 2\n\tcase SeverityError:\n\t\treturn 3\n\tcase SeverityCritical:\n\t\treturn 4\n\tdefault:\n\t\treturn 0\n\t}\n}\n\nfunc parseRecipients(raw string) []string {\n\tif strings.TrimSpace(raw) == \"\" {\n\t\treturn []string{}\n\t}\n\tvar vals []string\n\tif err := json.Unmarshal([]byte(raw), &vals); err != nil {\n\t\treturn []string{}\n\t}\n\tout := make([]string, 0, len(vals))\n\tfor _, v := range vals {\n\t\tv = strings.TrimSpace(v)\n\t\tif v != \"\" {\n\t\t\tout = append(out, v)\n\t\t}\n\t}\n\treturn out\n}\n\nfunc parseStringList(raw string) []string {\n\tif strings.TrimSpace(raw) == \"\" {\n\t\treturn []string{}\n\t}\n\tvar vals []string\n\tif err := json.Unmarshal([]byte(raw), &vals); err != nil {\n\t\treturn []string{}\n\t}\n\tout := make([]string, 0, len(vals))\n\tfor _, v := range vals {\n\t\tv = strings.TrimSpace(v)\n\t\tif v != \"\" {\n\t\t\tout = append(out, v)\n\t\t}\n\t}\n\treturn out\n}\n\nfunc defaultBody(job database.AlertDispatchJobWithDetails) string {\n\tvar b strings.Builder\n\tb.WriteString(\"CH-UI Alert\\n\\n\")\n\tb.WriteString(\"Type: \" + job.EventType + \"\\n\")\n\tb.WriteString(\"Severity: \" + strings.ToUpper(job.EventSeverity) + \"\\n\")\n\tb.WriteString(\"Title: \" + job.EventTitle + \"\\n\")\n\tb.WriteString(\"Message: 
\" + job.EventMessage + \"\\n\")\n\tb.WriteString(\"Channel: \" + job.ChannelName + \" (\" + job.ChannelType + \")\\n\")\n\tif job.EventPayloadJSON != nil && strings.TrimSpace(*job.EventPayloadJSON) != \"\" {\n\t\tb.WriteString(\"\\nPayload:\\n\")\n\t\tb.WriteString(*job.EventPayloadJSON)\n\t}\n\treturn b.String()\n}\n\nfunc renderTemplate(tpl string, job database.AlertDispatchJobWithDetails) string {\n\tout := tpl\n\trepl := map[string]string{\n\t\t\"{{event_type}}\":   job.EventType,\n\t\t\"{{severity}}\":     job.EventSeverity,\n\t\t\"{{title}}\":        job.EventTitle,\n\t\t\"{{message}}\":      job.EventMessage,\n\t\t\"{{channel_name}}\": job.ChannelName,\n\t\t\"{{channel_type}}\": job.ChannelType,\n\t\t\"{{payload_json}}\": coalesce(job.EventPayloadJSON, \"\"),\n\t\t\"{{created_at}}\":   job.CreatedAt,\n\t\t\"{{event_id}}\":     job.EventID,\n\t\t\"{{rule_name}}\":    job.RuleName,\n\t}\n\tfor key, val := range repl {\n\t\tout = strings.ReplaceAll(out, key, val)\n\t}\n\treturn out\n}\n\nfunc retryBackoff(attempt int) time.Duration {\n\tif attempt <= 0 {\n\t\treturn 10 * time.Second\n\t}\n\tbase := 10 * time.Second\n\tmultiplier := math.Pow(2, float64(attempt-1))\n\td := time.Duration(multiplier * float64(base))\n\tif d > 30*time.Minute {\n\t\treturn 30 * time.Minute\n\t}\n\treturn d\n}\n\nfunc (d *Dispatcher) tryEscalationForDispatchJob(job database.AlertDispatchJobWithDetails, subject, body, rootErr string, failedAttempt int) string {\n\tif job.RouteEscalationChannelID == nil || strings.TrimSpace(*job.RouteEscalationChannelID) == \"\" {\n\t\treturn \"\"\n\t}\n\tif job.RouteEscalationAfterFailures > 0 && failedAttempt < job.RouteEscalationAfterFailures {\n\t\treturn \"\"\n\t}\n\tif job.EscalationChannelType == nil || job.EscalationChannelConfigEncrypted == nil {\n\t\treturn \"escalation skipped: channel metadata unavailable\"\n\t}\n\trecipients := parseRecipients(coalesce(job.RouteEscalationRecipientsJSON, \"\"))\n\tif len(recipients) == 0 {\n\t\trecipients = 
parseRecipients(job.RouteRecipientsJSON)\n\t}\n\tif len(recipients) == 0 {\n\t\treturn \"escalation skipped: no escalation recipients\"\n\t}\n\tdecrypted, err := crypto.Decrypt(*job.EscalationChannelConfigEncrypted, d.cfg.AppSecretKey)\n\tif err != nil {\n\t\treturn \"escalation decrypt failed: \" + err.Error()\n\t}\n\tcfg := map[string]interface{}{}\n\tif err := json.Unmarshal([]byte(decrypted), &cfg); err != nil {\n\t\treturn \"escalation config parse failed: \" + err.Error()\n\t}\n\tescalationSubject := \"[ESCALATED] \" + subject\n\tescalationBody := body + \"\\n\\nEscalation reason:\\n\" + rootErr\n\tif _, err := d.sendByChannelType(context.Background(), *job.EscalationChannelType, cfg, recipients, escalationSubject, escalationBody); err != nil {\n\t\treturn \"escalation send failed: \" + err.Error()\n\t}\n\treturn \"escalated via \" + coalesce(job.EscalationChannelName, \"channel\")\n}\n\nfunc (d *Dispatcher) tryEscalationForDigest(digest database.AlertRouteDigestWithDetails, subject, body, rootErr string, failedAttempt int) string {\n\tif digest.EscalationChannelID == nil || strings.TrimSpace(*digest.EscalationChannelID) == \"\" {\n\t\treturn \"\"\n\t}\n\tif digest.EscalationAfterFailures > 0 && failedAttempt < digest.EscalationAfterFailures {\n\t\treturn \"\"\n\t}\n\tif digest.EscalationChannelType == nil || digest.EscalationChannelConfigEncrypted == nil {\n\t\treturn \"digest escalation skipped: channel metadata unavailable\"\n\t}\n\trecipients := parseRecipients(coalesce(digest.EscalationRecipientsJSON, \"\"))\n\tif len(recipients) == 0 {\n\t\trecipients = parseRecipients(digest.RouteRecipientsJSON)\n\t}\n\tif len(recipients) == 0 {\n\t\treturn \"digest escalation skipped: no recipients\"\n\t}\n\tdecrypted, err := crypto.Decrypt(*digest.EscalationChannelConfigEncrypted, d.cfg.AppSecretKey)\n\tif err != nil {\n\t\treturn \"digest escalation decrypt failed: \" + err.Error()\n\t}\n\tcfg := map[string]interface{}{}\n\tif err := 
json.Unmarshal([]byte(decrypted), &cfg); err != nil {\n\t\treturn \"digest escalation config parse failed: \" + err.Error()\n\t}\n\tescalationSubject := \"[ESCALATED] \" + subject\n\tescalationBody := body + \"\\n\\nEscalation reason:\\n\" + rootErr\n\tif _, err := d.sendByChannelType(context.Background(), *digest.EscalationChannelType, cfg, recipients, escalationSubject, escalationBody); err != nil {\n\t\treturn \"digest escalation send failed: \" + err.Error()\n\t}\n\treturn \"digest escalated via \" + coalesce(digest.EscalationChannelName, \"channel\")\n}\n\nfunc renderDigestBody(digest database.AlertRouteDigestWithDetails) string {\n\ttitles := parseStringList(digest.TitlesJSON)\n\tvar b strings.Builder\n\tb.WriteString(\"CH-UI Alert Digest\\n\\n\")\n\tb.WriteString(\"Event type: \" + digest.EventType + \"\\n\")\n\tb.WriteString(\"Severity: \" + strings.ToUpper(digest.Severity) + \"\\n\")\n\tb.WriteString(\"Events in window: \" + strconv.Itoa(digest.EventCount) + \"\\n\")\n\tb.WriteString(\"Window: \" + digest.BucketStart + \" -> \" + digest.BucketEnd + \"\\n\")\n\tb.WriteString(\"Channel: \" + digest.ChannelName + \" (\" + digest.ChannelType + \")\\n\")\n\tif len(titles) > 0 {\n\t\tb.WriteString(\"\\nTitles:\\n\")\n\t\tfor i, title := range titles {\n\t\t\tb.WriteString(fmt.Sprintf(\"%d. %s\\n\", i+1, title))\n\t\t\tif i >= 14 {\n\t\t\t\tremaining := len(titles) - (i + 1)\n\t\t\t\tif remaining > 0 {\n\t\t\t\t\tb.WriteString(fmt.Sprintf(\"... 
and %d more\\n\", remaining))\n\t\t\t\t}\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\treturn b.String()\n}\n\nfunc coalesce(v *string, fallback string) string {\n\tif v == nil || strings.TrimSpace(*v) == \"\" {\n\t\treturn fallback\n\t}\n\treturn *v\n}\n\nfunc stringCfg(cfg map[string]interface{}, key string) string {\n\tv := strings.TrimSpace(fmt.Sprintf(\"%v\", cfg[key]))\n\tif v == \"<nil>\" {\n\t\treturn \"\"\n\t}\n\treturn v\n}\n\nfunc boolCfg(cfg map[string]interface{}, key string, defaultVal bool) bool {\n\traw, ok := cfg[key]\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\tswitch v := raw.(type) {\n\tcase bool:\n\t\treturn v\n\tcase float64:\n\t\treturn v != 0\n\tcase string:\n\t\tval := strings.ToLower(strings.TrimSpace(v))\n\t\treturn val == \"1\" || val == \"true\" || val == \"yes\"\n\tdefault:\n\t\treturn defaultVal\n\t}\n}\n\nfunc intCfg(cfg map[string]interface{}, key string, defaultVal int) int {\n\traw, ok := cfg[key]\n\tif !ok {\n\t\treturn defaultVal\n\t}\n\tswitch v := raw.(type) {\n\tcase float64:\n\t\treturn int(v)\n\tcase int:\n\t\treturn v\n\tcase string:\n\t\tif n, err := strconv.Atoi(strings.TrimSpace(v)); err == nil {\n\t\t\treturn n\n\t\t}\n\t}\n\treturn defaultVal\n}\n\nfunc (d *Dispatcher) sendSMTP(ctx context.Context, cfg map[string]interface{}, recipients []string, subject, body string) (string, error) {\n\thost := stringCfg(cfg, \"host\")\n\tfromEmail := stringCfg(cfg, \"from_email\")\n\tusername := stringCfg(cfg, \"username\")\n\tpassword := stringCfg(cfg, \"password\")\n\tfromName := stringCfg(cfg, \"from_name\")\n\tif host == \"\" || fromEmail == \"\" {\n\t\treturn \"\", fmt.Errorf(\"smtp config requires host and from_email\")\n\t}\n\n\tport := intCfg(cfg, \"port\", 587)\n\taddr := fmt.Sprintf(\"%s:%d\", host, port)\n\tuseTLS := boolCfg(cfg, \"use_tls\", false)\n\tstartTLS := boolCfg(cfg, \"starttls\", !useTLS)\n\tinsecureSkipVerify := boolCfg(cfg, \"insecure_skip_verify\", false)\n\n\tfromHeader := fromEmail\n\tif fromName != \"\" 
{\n\t\tfromHeader = fmt.Sprintf(\"%s <%s>\", fromName, fromEmail)\n\t}\n\n\tmsg := []byte(\"From: \" + fromHeader + \"\\r\\n\" +\n\t\t\"To: \" + strings.Join(recipients, \",\") + \"\\r\\n\" +\n\t\t\"Subject: \" + subject + \"\\r\\n\" +\n\t\t\"MIME-Version: 1.0\\r\\n\" +\n\t\t\"Content-Type: text/plain; charset=UTF-8\\r\\n\\r\\n\" +\n\t\tbody)\n\n\tvar auth smtp.Auth\n\tif username != \"\" {\n\t\tauth = smtp.PlainAuth(\"\", username, password, host)\n\t}\n\n\tif useTLS {\n\t\tconn, err := tls.Dial(\"tcp\", addr, &tls.Config{\n\t\t\tServerName:         host,\n\t\t\tInsecureSkipVerify: insecureSkipVerify,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp tls dial: %w\", err)\n\t\t}\n\t\tdefer conn.Close()\n\n\t\tclient, err := smtp.NewClient(conn, host)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp new client: %w\", err)\n\t\t}\n\t\tdefer client.Close()\n\n\t\tif auth != nil {\n\t\t\tif err := client.Auth(auth); err != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"smtp auth: %w\", err)\n\t\t\t}\n\t\t}\n\t\tif err := client.Mail(fromEmail); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp mail: %w\", err)\n\t\t}\n\t\tfor _, rcpt := range recipients {\n\t\t\tif err := client.Rcpt(rcpt); err != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"smtp rcpt %s: %w\", rcpt, err)\n\t\t\t}\n\t\t}\n\t\tw, err := client.Data()\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp data: %w\", err)\n\t\t}\n\t\tif _, err := w.Write(msg); err != nil {\n\t\t\t_ = w.Close()\n\t\t\treturn \"\", fmt.Errorf(\"smtp write: %w\", err)\n\t\t}\n\t\tif err := w.Close(); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp close data: %w\", err)\n\t\t}\n\t\tif err := client.Quit(); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp quit: %w\", err)\n\t\t}\n\t\treturn \"smtp\", nil\n\t}\n\n\tif startTLS {\n\t\tclient, err := smtp.Dial(addr)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp dial: %w\", err)\n\t\t}\n\t\tdefer client.Close()\n\n\t\tif ok, _ := 
client.Extension(\"STARTTLS\"); ok {\n\t\t\tif err := client.StartTLS(&tls.Config{\n\t\t\t\tServerName:         host,\n\t\t\t\tInsecureSkipVerify: insecureSkipVerify,\n\t\t\t}); err != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"smtp starttls: %w\", err)\n\t\t\t}\n\t\t}\n\t\tif auth != nil {\n\t\t\tif err := client.Auth(auth); err != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"smtp auth: %w\", err)\n\t\t\t}\n\t\t}\n\t\tif err := client.Mail(fromEmail); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp mail: %w\", err)\n\t\t}\n\t\tfor _, rcpt := range recipients {\n\t\t\tif err := client.Rcpt(rcpt); err != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"smtp rcpt %s: %w\", rcpt, err)\n\t\t\t}\n\t\t}\n\t\tw, err := client.Data()\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp data: %w\", err)\n\t\t}\n\t\tif _, err := w.Write(msg); err != nil {\n\t\t\t_ = w.Close()\n\t\t\treturn \"\", fmt.Errorf(\"smtp write: %w\", err)\n\t\t}\n\t\tif err := w.Close(); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp close data: %w\", err)\n\t\t}\n\t\tif err := client.Quit(); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"smtp quit: %w\", err)\n\t\t}\n\t\treturn \"smtp\", nil\n\t}\n\n\tif err := smtp.SendMail(addr, auth, fromEmail, recipients, msg); err != nil {\n\t\treturn \"\", fmt.Errorf(\"smtp sendmail: %w\", err)\n\t}\n\treturn \"smtp\", nil\n}\n\nfunc (d *Dispatcher) sendResend(ctx context.Context, cfg map[string]interface{}, recipients []string, subject, body string) (string, error) {\n\tapiKey := stringCfg(cfg, \"api_key\")\n\tfromEmail := stringCfg(cfg, \"from_email\")\n\tfromName := stringCfg(cfg, \"from_name\")\n\tbaseURL := stringCfg(cfg, \"base_url\")\n\tif baseURL == \"\" {\n\t\tbaseURL = \"https://api.resend.com\"\n\t}\n\tif apiKey == \"\" || fromEmail == \"\" {\n\t\treturn \"\", fmt.Errorf(\"resend config requires api_key and from_email\")\n\t}\n\n\tfrom := fromEmail\n\tif fromName != \"\" {\n\t\tfrom = fmt.Sprintf(\"%s <%s>\", fromName, fromEmail)\n\t}\n\tpayload := 
map[string]interface{}{\n\t\t\"from\":    from,\n\t\t\"to\":      recipients,\n\t\t\"subject\": subject,\n\t\t\"text\":    body,\n\t}\n\traw, _ := json.Marshal(payload)\n\n\treq, err := http.NewRequestWithContext(ctx, http.MethodPost, strings.TrimRight(baseURL, \"/\")+\"/emails\", bytes.NewReader(raw))\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"resend request: %w\", err)\n\t}\n\treq.Header.Set(\"Authorization\", \"Bearer \"+apiKey)\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\treq.Header.Set(\"Accept\", \"application/json\")\n\n\tresp, err := d.http.Do(req)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"resend send: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\tdata, _ := io.ReadAll(resp.Body)\n\tif resp.StatusCode >= 300 {\n\t\treturn \"\", fmt.Errorf(\"resend error (%d): %s\", resp.StatusCode, strings.TrimSpace(string(data)))\n\t}\n\n\tvar out struct {\n\t\tID string `json:\"id\"`\n\t}\n\t_ = json.Unmarshal(data, &out)\n\treturn out.ID, nil\n}\n\nfunc (d *Dispatcher) sendBrevo(ctx context.Context, cfg map[string]interface{}, recipients []string, subject, body string) (string, error) {\n\tapiKey := stringCfg(cfg, \"api_key\")\n\tfromEmail := stringCfg(cfg, \"from_email\")\n\tfromName := stringCfg(cfg, \"from_name\")\n\tbaseURL := stringCfg(cfg, \"base_url\")\n\tif baseURL == \"\" {\n\t\tbaseURL = \"https://api.brevo.com\"\n\t}\n\tif apiKey == \"\" || fromEmail == \"\" {\n\t\treturn \"\", fmt.Errorf(\"brevo config requires api_key and from_email\")\n\t}\n\n\tto := make([]map[string]string, 0, len(recipients))\n\tfor _, r := range recipients {\n\t\tto = append(to, map[string]string{\"email\": r})\n\t}\n\n\tpayload := map[string]interface{}{\n\t\t\"sender\": map[string]string{\n\t\t\t\"name\":  fromName,\n\t\t\t\"email\": fromEmail,\n\t\t},\n\t\t\"to\":          to,\n\t\t\"subject\":     subject,\n\t\t\"textContent\": body,\n\t}\n\traw, _ := json.Marshal(payload)\n\n\treq, err := http.NewRequestWithContext(ctx, http.MethodPost, 
strings.TrimRight(baseURL, \"/\")+\"/v3/smtp/email\", bytes.NewReader(raw))\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"brevo request: %w\", err)\n\t}\n\treq.Header.Set(\"api-key\", apiKey)\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\treq.Header.Set(\"Accept\", \"application/json\")\n\n\tresp, err := d.http.Do(req)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"brevo send: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\tdata, _ := io.ReadAll(resp.Body)\n\tif resp.StatusCode >= 300 {\n\t\treturn \"\", fmt.Errorf(\"brevo error (%d): %s\", resp.StatusCode, strings.TrimSpace(string(data)))\n\t}\n\n\tvar out struct {\n\t\tMessageID string `json:\"messageId\"`\n\t}\n\t_ = json.Unmarshal(data, &out)\n\treturn out.MessageID, nil\n}\n"
  },
  {
    "path": "internal/brain/provider.go",
    "content": "package brain\n\nimport (\n\t\"bufio\"\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n)\n\n// Message represents one chat message for provider calls.\ntype Message struct {\n\tRole    string `json:\"role\"`\n\tContent string `json:\"content\"`\n}\n\n// ProviderConfig defines provider runtime configuration.\ntype ProviderConfig struct {\n\tKind    string\n\tBaseURL string\n\tAPIKey  string\n}\n\n// ChatResult holds optional metadata returned after streaming completes.\ntype ChatResult struct {\n\tInputTokens     int\n\tOutputTokens    int\n\tModelParameters map[string]interface{}\n}\n\n// Provider handles streaming chat and model discovery.\ntype Provider interface {\n\tStreamChat(ctx context.Context, cfg ProviderConfig, model string, messages []Message, onDelta func(string) error) (*ChatResult, error)\n\tListModels(ctx context.Context, cfg ProviderConfig) ([]string, error)\n}\n\nfunc NewProvider(kind string) (Provider, error) {\n\tswitch strings.ToLower(strings.TrimSpace(kind)) {\n\tcase \"openai\", \"openai_compatible\":\n\t\treturn &openAIProvider{client: &http.Client{}}, nil\n\tcase \"ollama\":\n\t\treturn &ollamaProvider{client: &http.Client{}}, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unsupported provider kind: %s\", kind)\n\t}\n}\n\n// -------- OpenAI provider --------\n\ntype openAIProvider struct {\n\tclient *http.Client\n}\n\ntype openAIRequest struct {\n\tModel         string               `json:\"model\"`\n\tMessages      []Message            `json:\"messages\"`\n\tStream        bool                 `json:\"stream\"`\n\tTemperature   *float64             `json:\"temperature,omitempty\"`\n\tStreamOptions *openAIStreamOptions `json:\"stream_options,omitempty\"`\n}\n\ntype openAIStreamOptions struct {\n\tIncludeUsage bool `json:\"include_usage\"`\n}\n\ntype openAIChunk struct {\n\tChoices []struct {\n\t\tDelta struct {\n\t\t\tContent string 
`json:\"content\"`\n\t\t} `json:\"delta\"`\n\t} `json:\"choices\"`\n\tUsage *struct {\n\t\tPromptTokens     int `json:\"prompt_tokens\"`\n\t\tCompletionTokens int `json:\"completion_tokens\"`\n\t} `json:\"usage\"`\n}\n\nconst openAIDefaultTemperature = 0.1\n\nfunc ensureOpenAIV1Base(rawBase string) string {\n\ttrimmed := strings.TrimRight(strings.TrimSpace(rawBase), \"/\")\n\tif trimmed == \"\" {\n\t\treturn \"https://api.openai.com/v1\"\n\t}\n\tparsed, err := url.Parse(trimmed)\n\tif err != nil {\n\t\tif strings.HasSuffix(trimmed, \"/v1\") {\n\t\t\treturn trimmed\n\t\t}\n\t\treturn trimmed + \"/v1\"\n\t}\n\tpath := strings.TrimRight(strings.TrimSpace(parsed.Path), \"/\")\n\tif path == \"/v1\" || strings.HasSuffix(path, \"/v1\") {\n\t\treturn strings.TrimRight(parsed.String(), \"/\")\n\t}\n\tif path == \"\" || path == \"/\" {\n\t\tparsed.Path = \"/v1\"\n\t} else {\n\t\tparsed.Path = path + \"/v1\"\n\t}\n\treturn strings.TrimRight(parsed.String(), \"/\")\n}\n\nfunc shouldRetryOpenAIV1(status int, body []byte) bool {\n\tif status != http.StatusNotFound {\n\t\treturn false\n\t}\n\tmsg := strings.ToLower(string(body))\n\treturn strings.Contains(msg, \"invalid url\") || strings.Contains(msg, \"/models\") || strings.Contains(msg, \"/chat/completions\")\n}\n\nfunc (p *openAIProvider) baseURL(cfg ProviderConfig) string {\n\traw := strings.TrimSpace(cfg.BaseURL)\n\tif raw == \"\" {\n\t\treturn \"https://api.openai.com/v1\"\n\t}\n\n\tparsed, err := url.Parse(raw)\n\tif err != nil {\n\t\treturn strings.TrimRight(raw, \"/\")\n\t}\n\n\tbase := strings.TrimRight(parsed.String(), \"/\")\n\tpath := strings.TrimSpace(parsed.Path)\n\t// For OpenAI-style APIs, a root URL should target /v1 endpoints.\n\tif path == \"\" || path == \"/\" {\n\t\treturn strings.TrimRight(base, \"/\") + \"/v1\"\n\t}\n\treturn base\n}\n\n// DefaultModelParameters returns the default provider request parameters for one model.\nfunc DefaultModelParameters(kind, model string) map[string]interface{} {\n\tswitch 
strings.ToLower(strings.TrimSpace(kind)) {\n\tcase \"openai\", \"openai_compatible\":\n\t\treturn openAIModelParameters(openAIRequestTemperature(model))\n\tdefault:\n\t\treturn nil\n\t}\n}\n\nfunc openAIRequestTemperature(model string) *float64 {\n\tif openAIModelRequiresDefaultTemperature(model) {\n\t\treturn nil\n\t}\n\ttemperature := openAIDefaultTemperature\n\treturn &temperature\n}\n\nfunc openAIModelRequiresDefaultTemperature(model string) bool {\n\tname := strings.ToLower(strings.TrimSpace(model))\n\tif slash := strings.LastIndex(name, \"/\"); slash >= 0 {\n\t\tname = name[slash+1:]\n\t}\n\treturn strings.HasPrefix(name, \"o1\") || strings.HasPrefix(name, \"o3\") || strings.HasPrefix(name, \"o4\")\n}\n\nfunc openAIModelParameters(temperature *float64) map[string]interface{} {\n\tparams := map[string]interface{}{}\n\tif temperature != nil {\n\t\tparams[\"temperature\"] = *temperature\n\t}\n\treturn params\n}\n\nfunc isUnsupportedOpenAITemperature(status int, body []byte) bool {\n\tif status != http.StatusBadRequest {\n\t\treturn false\n\t}\n\tmsg := strings.ToLower(string(body))\n\treturn strings.Contains(msg, \"temperature\") &&\n\t\t(strings.Contains(msg, \"unsupported value\") ||\n\t\t\tstrings.Contains(msg, \"does not support\") ||\n\t\t\tstrings.Contains(msg, \"only the default\"))\n}\n\nfunc (p *openAIProvider) StreamChat(ctx context.Context, cfg ProviderConfig, model string, messages []Message, onDelta func(string) error) (*ChatResult, error) {\n\tif strings.TrimSpace(cfg.APIKey) == \"\" {\n\t\treturn nil, errors.New(\"provider API key is not configured\")\n\t}\n\n\ttemperatures := []*float64{openAIRequestTemperature(model)}\n\tif temperatures[0] != nil {\n\t\ttemperatures = append(temperatures, nil)\n\t}\n\n\tvar lastStatus int\n\tvar lastErrBody []byte\n\tfor attemptIdx, temperature := range temperatures {\n\t\tresult, status, errBody, err := p.streamChatAttempt(ctx, cfg, model, messages, temperature, onDelta)\n\t\tif err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t\tif status == 0 {\n\t\t\treturn result, nil\n\t\t}\n\t\tlastStatus = status\n\t\tlastErrBody = errBody\n\t\tif attemptIdx < len(temperatures)-1 && isUnsupportedOpenAITemperature(status, errBody) {\n\t\t\tcontinue\n\t\t}\n\t\treturn nil, fmt.Errorf(\"provider error (%d): %s\", status, string(errBody))\n\t}\n\n\tif lastStatus != 0 {\n\t\treturn nil, fmt.Errorf(\"provider error (%d): %s\", lastStatus, string(lastErrBody))\n\t}\n\treturn nil, errors.New(\"provider request failed\")\n}\n\nfunc (p *openAIProvider) streamChatAttempt(ctx context.Context, cfg ProviderConfig, model string, messages []Message, temperature *float64, onDelta func(string) error) (*ChatResult, int, []byte, error) {\n\tpayload := openAIRequest{\n\t\tModel:         model,\n\t\tMessages:      messages,\n\t\tStream:        true,\n\t\tTemperature:   temperature,\n\t\tStreamOptions: &openAIStreamOptions{IncludeUsage: true},\n\t}\n\tbody, err := json.Marshal(payload)\n\tif err != nil {\n\t\treturn nil, 0, nil, fmt.Errorf(\"marshal provider request: %w\", err)\n\t}\n\n\tprimaryBase := p.baseURL(cfg)\n\tbases := []string{primaryBase}\n\tv1Fallback := ensureOpenAIV1Base(primaryBase)\n\tif v1Fallback != primaryBase {\n\t\tbases = append(bases, v1Fallback)\n\t}\n\n\tvar lastStatus int\n\tvar lastErrBody []byte\n\n\tfor idx, base := range bases {\n\t\tendpoint := base + \"/chat/completions\"\n\t\treq, reqErr := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body))\n\t\tif reqErr != nil {\n\t\t\treturn nil, 0, nil, fmt.Errorf(\"create provider request: %w\", reqErr)\n\t\t}\n\t\treq.Header.Set(\"Content-Type\", \"application/json\")\n\t\treq.Header.Set(\"Authorization\", \"Bearer \"+cfg.APIKey)\n\n\t\tresp, doErr := p.client.Do(req)\n\t\tif doErr != nil {\n\t\t\treturn nil, 0, nil, fmt.Errorf(\"provider request failed: %w\", doErr)\n\t\t}\n\n\t\tif resp.StatusCode < 200 || resp.StatusCode >= 300 {\n\t\t\terrBody, _ := 
io.ReadAll(resp.Body)\n\t\t\tresp.Body.Close()\n\t\t\tlastStatus = resp.StatusCode\n\t\t\tlastErrBody = errBody\n\t\t\tif idx < len(bases)-1 && shouldRetryOpenAIV1(resp.StatusCode, errBody) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn nil, resp.StatusCode, errBody, nil\n\t\t}\n\n\t\tresult := ChatResult{ModelParameters: openAIModelParameters(temperature)}\n\t\tscanner := bufio.NewScanner(resp.Body)\n\t\tfor scanner.Scan() {\n\t\t\tline := scanner.Text()\n\t\t\tif !strings.HasPrefix(line, \"data: \") {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdata := strings.TrimPrefix(line, \"data: \")\n\t\t\tif data == \"[DONE]\" {\n\t\t\t\tresp.Body.Close()\n\t\t\t\treturn &result, 0, nil, nil\n\t\t\t}\n\n\t\t\tvar chunk openAIChunk\n\t\t\tif err := json.Unmarshal([]byte(data), &chunk); err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif chunk.Usage != nil {\n\t\t\t\tresult.InputTokens = chunk.Usage.PromptTokens\n\t\t\t\tresult.OutputTokens = chunk.Usage.CompletionTokens\n\t\t\t}\n\t\t\tfor _, c := range chunk.Choices {\n\t\t\t\tif c.Delta.Content == \"\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif err := onDelta(c.Delta.Content); err != nil {\n\t\t\t\t\tresp.Body.Close()\n\t\t\t\t\treturn nil, 0, nil, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif err := scanner.Err(); err != nil {\n\t\t\tresp.Body.Close()\n\t\t\treturn nil, 0, nil, fmt.Errorf(\"read provider stream: %w\", err)\n\t\t}\n\t\tresp.Body.Close()\n\t\treturn &result, 0, nil, nil\n\t}\n\n\tif lastStatus != 0 {\n\t\treturn nil, lastStatus, lastErrBody, nil\n\t}\n\treturn nil, 0, nil, errors.New(\"provider request failed\")\n}\n\nfunc (p *openAIProvider) ListModels(ctx context.Context, cfg ProviderConfig) ([]string, error) {\n\tif strings.TrimSpace(cfg.APIKey) == \"\" {\n\t\treturn nil, errors.New(\"provider API key is not configured\")\n\t}\n\tprimaryBase := p.baseURL(cfg)\n\tbases := []string{primaryBase}\n\tv1Fallback := ensureOpenAIV1Base(primaryBase)\n\tif v1Fallback != primaryBase {\n\t\tbases = append(bases, 
v1Fallback)\n\t}\n\n\tvar lastStatus int\n\tvar lastErrBody []byte\n\n\tfor idx, base := range bases {\n\t\tendpoint := base + \"/models\"\n\t\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"create provider request: %w\", err)\n\t\t}\n\t\treq.Header.Set(\"Authorization\", \"Bearer \"+cfg.APIKey)\n\n\t\tresp, err := p.client.Do(req)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"provider request failed: %w\", err)\n\t\t}\n\n\t\tif resp.StatusCode < 200 || resp.StatusCode >= 300 {\n\t\t\terrBody, _ := io.ReadAll(resp.Body)\n\t\t\tresp.Body.Close()\n\t\t\tlastStatus = resp.StatusCode\n\t\t\tlastErrBody = errBody\n\t\t\tif idx < len(bases)-1 && shouldRetryOpenAIV1(resp.StatusCode, errBody) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn nil, fmt.Errorf(\"provider error (%d): %s\", resp.StatusCode, string(errBody))\n\t\t}\n\n\t\tvar parsed struct {\n\t\t\tData []struct {\n\t\t\t\tID string `json:\"id\"`\n\t\t\t} `json:\"data\"`\n\t\t}\n\t\tif err := json.NewDecoder(resp.Body).Decode(&parsed); err != nil {\n\t\t\tresp.Body.Close()\n\t\t\treturn nil, fmt.Errorf(\"decode models response: %w\", err)\n\t\t}\n\t\tresp.Body.Close()\n\n\t\tmodels := make([]string, 0, len(parsed.Data))\n\t\tfor _, item := range parsed.Data {\n\t\t\tif strings.TrimSpace(item.ID) == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tmodels = append(models, item.ID)\n\t\t}\n\t\treturn models, nil\n\t}\n\n\tif lastStatus != 0 {\n\t\treturn nil, fmt.Errorf(\"provider error (%d): %s\", lastStatus, string(lastErrBody))\n\t}\n\treturn nil, errors.New(\"provider request failed\")\n}\n\n// -------- Ollama provider --------\n\ntype ollamaProvider struct {\n\tclient *http.Client\n}\n\nfunc (p *ollamaProvider) baseURL(cfg ProviderConfig) string {\n\tif strings.TrimSpace(cfg.BaseURL) != \"\" {\n\t\treturn strings.TrimRight(cfg.BaseURL, \"/\")\n\t}\n\treturn \"http://localhost:11434\"\n}\n\nfunc (p *ollamaProvider) StreamChat(ctx 
context.Context, cfg ProviderConfig, model string, messages []Message, onDelta func(string) error) (*ChatResult, error) {\n\tpayload := map[string]interface{}{\n\t\t\"model\":    model,\n\t\t\"stream\":   true,\n\t\t\"messages\": messages,\n\t}\n\tbody, err := json.Marshal(payload)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"marshal provider request: %w\", err)\n\t}\n\n\turl := p.baseURL(cfg) + \"/api/chat\"\n\treq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"create provider request: %w\", err)\n\t}\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tresp, err := p.client.Do(req)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"provider request failed: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode < 200 || resp.StatusCode >= 300 {\n\t\terrBody, _ := io.ReadAll(resp.Body)\n\t\treturn nil, fmt.Errorf(\"provider error (%d): %s\", resp.StatusCode, string(errBody))\n\t}\n\n\tvar result ChatResult\n\tscanner := bufio.NewScanner(resp.Body)\n\tfor scanner.Scan() {\n\t\tline := strings.TrimSpace(scanner.Text())\n\t\tif line == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar chunk struct {\n\t\t\tDone    bool `json:\"done\"`\n\t\t\tMessage struct {\n\t\t\t\tContent string `json:\"content\"`\n\t\t\t} `json:\"message\"`\n\t\t\tError           string `json:\"error\"`\n\t\t\tPromptEvalCount int    `json:\"prompt_eval_count\"`\n\t\t\tEvalCount       int    `json:\"eval_count\"`\n\t\t}\n\t\tif err := json.Unmarshal([]byte(line), &chunk); err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif chunk.Error != \"\" {\n\t\t\treturn nil, errors.New(chunk.Error)\n\t\t}\n\t\tif chunk.Message.Content != \"\" {\n\t\t\tif err := onDelta(chunk.Message.Content); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\tif chunk.Done {\n\t\t\tresult.InputTokens = chunk.PromptEvalCount\n\t\t\tresult.OutputTokens = chunk.EvalCount\n\t\t\treturn &result, nil\n\t\t}\n\t}\n\n\tif err := 
scanner.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"read provider stream: %w\", err)\n\t}\n\treturn &result, nil\n}\n\nfunc (p *ollamaProvider) ListModels(ctx context.Context, cfg ProviderConfig) ([]string, error) {\n\turl := p.baseURL(cfg) + \"/api/tags\"\n\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"create provider request: %w\", err)\n\t}\n\n\tresp, err := p.client.Do(req)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"provider request failed: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode < 200 || resp.StatusCode >= 300 {\n\t\terrBody, _ := io.ReadAll(resp.Body)\n\t\treturn nil, fmt.Errorf(\"provider error (%d): %s\", resp.StatusCode, string(errBody))\n\t}\n\n\tvar parsed struct {\n\t\tModels []struct {\n\t\t\tName string `json:\"name\"`\n\t\t} `json:\"models\"`\n\t}\n\tif err := json.NewDecoder(resp.Body).Decode(&parsed); err != nil {\n\t\treturn nil, fmt.Errorf(\"decode models response: %w\", err)\n\t}\n\n\tmodels := make([]string, 0, len(parsed.Models))\n\tfor _, item := range parsed.Models {\n\t\tif strings.TrimSpace(item.Name) == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tmodels = append(models, item.Name)\n\t}\n\treturn models, nil\n}\n"
  },
  {
    "path": "internal/brain/provider_test.go",
    "content": "package brain\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"strings\"\n\t\"sync\"\n\t\"testing\"\n)\n\nfunc TestOpenAIProviderStreamChatOmitsTemperatureForReasoningModels(t *testing.T) {\n\tt.Parallel()\n\n\tvar attempts int\n\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/v1/chat/completions\" {\n\t\t\tt.Fatalf(\"unexpected path: %s\", r.URL.Path)\n\t\t}\n\t\tattempts++\n\n\t\tvar payload map[string]interface{}\n\t\tif err := json.NewDecoder(r.Body).Decode(&payload); err != nil {\n\t\t\tt.Fatalf(\"decode request: %v\", err)\n\t\t}\n\t\tif _, ok := payload[\"temperature\"]; ok {\n\t\t\tt.Fatalf(\"temperature should be omitted for o4-mini payloads\")\n\t\t}\n\n\t\twriteOpenAIStreamResponse(w, \"hello\", 3, 1)\n\t}))\n\tdefer server.Close()\n\n\tprovider := &openAIProvider{client: server.Client()}\n\tvar built strings.Builder\n\tresult, err := provider.StreamChat(\n\t\tcontext.Background(),\n\t\tProviderConfig{Kind: \"openai\", BaseURL: server.URL, APIKey: \"test-key\"},\n\t\t\"o4-mini\",\n\t\t[]Message{{Role: \"user\", Content: \"help\"}},\n\t\tfunc(delta string) error {\n\t\t\tbuilt.WriteString(delta)\n\t\t\treturn nil\n\t\t},\n\t)\n\tif err != nil {\n\t\tt.Fatalf(\"StreamChat returned error: %v\", err)\n\t}\n\tif built.String() != \"hello\" {\n\t\tt.Fatalf(\"unexpected streamed content: %q\", built.String())\n\t}\n\tif attempts != 1 {\n\t\tt.Fatalf(\"expected 1 attempt, got %d\", attempts)\n\t}\n\tif result == nil {\n\t\tt.Fatalf(\"expected result\")\n\t}\n\tif result.InputTokens != 3 || result.OutputTokens != 1 {\n\t\tt.Fatalf(\"unexpected usage: %+v\", result)\n\t}\n\tif len(result.ModelParameters) != 0 {\n\t\tt.Fatalf(\"expected no model parameters, got %+v\", result.ModelParameters)\n\t}\n}\n\nfunc TestOpenAIProviderStreamChatRetriesWithoutTemperatureOnUnsupportedValue(t *testing.T) {\n\tt.Parallel()\n\n\tvar 
(\n\t\tmu       sync.Mutex\n\t\tattempts []bool\n\t)\n\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/v1/chat/completions\" {\n\t\t\tt.Fatalf(\"unexpected path: %s\", r.URL.Path)\n\t\t}\n\n\t\tvar payload map[string]interface{}\n\t\tif err := json.NewDecoder(r.Body).Decode(&payload); err != nil {\n\t\t\tt.Fatalf(\"decode request: %v\", err)\n\t\t}\n\n\t\t_, hasTemperature := payload[\"temperature\"]\n\t\tmu.Lock()\n\t\tattempts = append(attempts, hasTemperature)\n\t\tmu.Unlock()\n\n\t\tif hasTemperature {\n\t\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\t_, _ = fmt.Fprint(w, `{\"error\":{\"message\":\"Unsupported value: 'temperature' does not support 0.1 with this model. Only the default (1) value is supported.\",\"type\":\"invalid_request_error\",\"param\":\"temperature\",\"code\":\"unsupported_value\"}}`)\n\t\t\treturn\n\t\t}\n\n\t\twriteOpenAIStreamResponse(w, \"fixed\", 5, 2)\n\t}))\n\tdefer server.Close()\n\n\tprovider := &openAIProvider{client: server.Client()}\n\tvar built strings.Builder\n\tresult, err := provider.StreamChat(\n\t\tcontext.Background(),\n\t\tProviderConfig{Kind: \"openai\", BaseURL: server.URL, APIKey: \"test-key\"},\n\t\t\"gpt-4o\",\n\t\t[]Message{{Role: \"user\", Content: \"help\"}},\n\t\tfunc(delta string) error {\n\t\t\tbuilt.WriteString(delta)\n\t\t\treturn nil\n\t\t},\n\t)\n\tif err != nil {\n\t\tt.Fatalf(\"StreamChat returned error: %v\", err)\n\t}\n\tif built.String() != \"fixed\" {\n\t\tt.Fatalf(\"unexpected streamed content: %q\", built.String())\n\t}\n\tmu.Lock()\n\tgotAttempts := append([]bool(nil), attempts...)\n\tmu.Unlock()\n\tif len(gotAttempts) != 2 {\n\t\tt.Fatalf(\"expected 2 attempts, got %d\", len(gotAttempts))\n\t}\n\tif !gotAttempts[0] || gotAttempts[1] {\n\t\tt.Fatalf(\"expected retry sequence [true false], got %v\", gotAttempts)\n\t}\n\tif result == nil {\n\t\tt.Fatalf(\"expected 
result\")\n\t}\n\tif result.InputTokens != 5 || result.OutputTokens != 2 {\n\t\tt.Fatalf(\"unexpected usage: %+v\", result)\n\t}\n\tif len(result.ModelParameters) != 0 {\n\t\tt.Fatalf(\"expected no model parameters after retry, got %+v\", result.ModelParameters)\n\t}\n}\n\nfunc writeOpenAIStreamResponse(w http.ResponseWriter, content string, inputTokens, outputTokens int) {\n\tw.Header().Set(\"Content-Type\", \"text/event-stream\")\n\t_, _ = fmt.Fprintf(w, \"data: {\\\"choices\\\":[{\\\"delta\\\":{\\\"content\\\":%q}}]}\\n\\n\", content)\n\t_, _ = fmt.Fprintf(w, \"data: {\\\"usage\\\":{\\\"prompt_tokens\\\":%d,\\\"completion_tokens\\\":%d}}\\n\\n\", inputTokens, outputTokens)\n\t_, _ = fmt.Fprint(w, \"data: [DONE]\\n\\n\")\n}\n"
  },
  {
    "path": "internal/config/config.go",
    "content": "package config\n\nimport (\n\t\"log/slog\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"runtime\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/license\"\n\t\"gopkg.in/yaml.v3\"\n)\n\ntype Config struct {\n\t// Server\n\tPort    int\n\tDevMode bool\n\tAppURL  string\n\n\t// Database\n\tDatabasePath string\n\n\t// Security\n\tAppSecretKey   string\n\tSessionMaxAge  int // seconds, default 7 days\n\tAllowedOrigins []string\n\n\t// Tunnel\n\tTunnelURL string\n\n\t// Embedded agent\n\tClickHouseURL  string // default http://localhost:8123\n\tConnectionName string // default Local ClickHouse\n\n\t// License\n\tLicenseJSON string // Stored signed license JSON (loaded from DB at startup)\n}\n\n// serverConfigFile is the YAML structure for the server config file.\ntype serverConfigFile struct {\n\tPort           int      `yaml:\"port\"`\n\tAppURL         string   `yaml:\"app_url\"`\n\tDatabasePath   string   `yaml:\"database_path\"`\n\tClickHouseURL  string   `yaml:\"clickhouse_url\"`\n\tConnectionName string   `yaml:\"connection_name\"`\n\tAppSecretKey   string   `yaml:\"app_secret_key\"`\n\tAllowedOrigins []string `yaml:\"allowed_origins\"`\n\tTunnelURL      string   `yaml:\"tunnel_url\"`\n}\n\n// DefaultServerConfigPath returns the platform-specific default config path.\nfunc DefaultServerConfigPath() string {\n\tswitch runtime.GOOS {\n\tcase \"darwin\":\n\t\thome, _ := os.UserHomeDir()\n\t\treturn filepath.Join(home, \".config\", \"ch-ui\", \"server.yaml\")\n\tdefault:\n\t\treturn \"/etc/ch-ui/server.yaml\"\n\t}\n}\n\n// Load creates a Config by merging: config file -> env vars -> defaults.\n// Priority: env vars > config file > defaults.\nfunc Load(configPath string) *Config {\n\tcfg := &Config{\n\t\tPort:           3488,\n\t\tDatabasePath:   \"./data/ch-ui.db\",\n\t\tAppSecretKey:   DefaultAppSecretKey,\n\t\tSessionMaxAge:  7 * 24 * 60 * 60,\n\t\tClickHouseURL:  \"http://localhost:8123\",\n\t\tConnectionName: \"Local 
ClickHouse\",\n\t}\n\n\t// 1. Load from config file (overrides defaults)\n\tif configPath != \"\" {\n\t\tif err := loadServerConfigFile(configPath, cfg); err != nil {\n\t\t\tif !os.IsNotExist(err) {\n\t\t\t\tslog.Warn(\"Failed to load config file\", \"path\", configPath, \"error\", err)\n\t\t\t} else {\n\t\t\t\tslog.Warn(\"Config file not found\", \"path\", configPath)\n\t\t\t}\n\t\t} else {\n\t\t\tslog.Info(\"Loaded config file\", \"path\", configPath)\n\t\t}\n\t} else {\n\t\t// Try default path, silently ignore if not found\n\t\tdefaultPath := DefaultServerConfigPath()\n\t\tif err := loadServerConfigFile(defaultPath, cfg); err == nil {\n\t\t\tslog.Info(\"Loaded config file\", \"path\", defaultPath)\n\t\t}\n\t}\n\n\t// 2. Override with environment variables (highest priority)\n\tif v := os.Getenv(\"PORT\"); v != \"\" {\n\t\tif p, err := strconv.Atoi(v); err == nil {\n\t\t\tcfg.Port = p\n\t\t}\n\t}\n\tif v := os.Getenv(\"APP_URL\"); v != \"\" {\n\t\tcfg.AppURL = trimQuotes(v)\n\t}\n\tif v := os.Getenv(\"DATABASE_PATH\"); v != \"\" {\n\t\tcfg.DatabasePath = v\n\t}\n\tif v := os.Getenv(\"CLICKHOUSE_URL\"); v != \"\" {\n\t\tcfg.ClickHouseURL = v\n\t}\n\tif v := os.Getenv(\"CONNECTION_NAME\"); v != \"\" {\n\t\tcfg.ConnectionName = trimQuotes(v)\n\t}\n\t// Backward-compatible typo alias\n\tif v := os.Getenv(\"CONNECITION_NAME\"); v != \"\" {\n\t\tcfg.ConnectionName = trimQuotes(v)\n\t}\n\tif v := os.Getenv(\"APP_SECRET_KEY\"); v != \"\" {\n\t\tcfg.AppSecretKey = trimQuotes(v)\n\t}\n\tif v := os.Getenv(\"ALLOWED_ORIGINS\"); v != \"\" {\n\t\tcfg.AllowedOrigins = nil\n\t\tfor _, o := range strings.Split(v, \",\") {\n\t\t\tif trimmed := strings.TrimSpace(o); trimmed != \"\" {\n\t\t\t\tcfg.AllowedOrigins = append(cfg.AllowedOrigins, trimmed)\n\t\t\t}\n\t\t}\n\t}\n\tif v := os.Getenv(\"TUNNEL_URL\"); v != \"\" {\n\t\tcfg.TunnelURL = v\n\t}\n\n\t// Derive defaults for computed fields\n\tif cfg.AppURL == \"\" {\n\t\tcfg.AppURL = \"http://localhost:\" + 
strconv.Itoa(cfg.Port)\n\t}\n\tif cfg.TunnelURL == \"\" {\n\t\tcfg.TunnelURL = \"ws://127.0.0.1:\" + strconv.Itoa(cfg.Port) + \"/connect\"\n\t}\n\n\tcfg.DevMode = os.Getenv(\"NODE_ENV\") != \"production\"\n\n\treturn cfg\n}\n\nfunc loadServerConfigFile(path string, cfg *Config) error {\n\tdata, err := os.ReadFile(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar fc serverConfigFile\n\tif err := yaml.Unmarshal(data, &fc); err != nil {\n\t\treturn err\n\t}\n\n\tif fc.Port != 0 {\n\t\tcfg.Port = fc.Port\n\t}\n\tif fc.AppURL != \"\" {\n\t\tcfg.AppURL = fc.AppURL\n\t}\n\tif fc.DatabasePath != \"\" {\n\t\tcfg.DatabasePath = fc.DatabasePath\n\t}\n\tif fc.ClickHouseURL != \"\" {\n\t\tcfg.ClickHouseURL = fc.ClickHouseURL\n\t}\n\tif fc.ConnectionName != \"\" {\n\t\tcfg.ConnectionName = fc.ConnectionName\n\t}\n\tif fc.AppSecretKey != \"\" {\n\t\tcfg.AppSecretKey = fc.AppSecretKey\n\t}\n\tif len(fc.AllowedOrigins) > 0 {\n\t\tcfg.AllowedOrigins = fc.AllowedOrigins\n\t}\n\tif fc.TunnelURL != \"\" {\n\t\tcfg.TunnelURL = fc.TunnelURL\n\t}\n\n\treturn nil\n}\n\n// GenerateServerTemplate returns a YAML config template for the server.\nfunc GenerateServerTemplate() string {\n\treturn `# CH-UI Server Configuration\n#\n# Place this file at:\n#   macOS: ~/.config/ch-ui/server.yaml\n#   Linux: /etc/ch-ui/server.yaml\n#\n# All settings can also be set via environment variables.\n# Priority: env vars > config file > defaults\n\n# HTTP port (default: 3488)\nport: 3488\n\n# Public URL of the server\n# app_url: https://ch-ui.yourcompany.com\n\n# SQLite database path (default: ./data/ch-ui.db)\n# database_path: /var/lib/ch-ui/ch-ui.db\n\n# ClickHouse HTTP endpoint (default: http://localhost:8123)\n# clickhouse_url: http://localhost:8123\n\n# Embedded connection display name (default: Local ClickHouse)\n# connection_name: Local ClickHouse\n\n# Secret key for session encryption (CHANGE THIS in production)\n# app_secret_key: your-random-secret-here\n\n# Allowed CORS origins\n# 
allowed_origins:\n#   - https://ch-ui.yourcompany.com\n`\n}\n\nfunc (c *Config) IsProduction() bool {\n\treturn !c.DevMode\n}\n\nfunc (c *Config) IsPro() bool {\n\tinfo := license.ValidateLicense(c.LicenseJSON)\n\treturn info.Valid && strings.EqualFold(strings.TrimSpace(info.Edition), \"pro\")\n}\n\nfunc trimQuotes(s string) string {\n\ts = strings.TrimSpace(s)\n\tif len(s) >= 2 {\n\t\tif (s[0] == '\\'' && s[len(s)-1] == '\\'') || (s[0] == '\"' && s[len(s)-1] == '\"') {\n\t\t\treturn s[1 : len(s)-1]\n\t\t}\n\t}\n\treturn s\n}\n"
  },
  {
    "path": "internal/config/secret.go",
    "content": "package config\n\nimport (\n\t\"crypto/rand\"\n\t\"encoding/base64\"\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"strings\"\n)\n\nconst (\n\t// DefaultAppSecretKey exists for backward compatibility only.\n\t// New installs should persist a random key when this placeholder is detected.\n\tDefaultAppSecretKey = \"ch-ui-default-secret-key-change-in-production\"\n)\n\ntype SecretKeySource string\n\nconst (\n\tSecretKeySourceConfigured SecretKeySource = \"configured\"\n\tSecretKeySourceFile       SecretKeySource = \"file\"\n\tSecretKeySourceGenerated  SecretKeySource = \"generated\"\n)\n\n// AppSecretKeyPath returns the default persisted key path based on the database path.\nfunc AppSecretKeyPath(databasePath string) string {\n\tdbPath := strings.TrimSpace(databasePath)\n\tif dbPath == \"\" {\n\t\tdbPath = \"./data/ch-ui.db\"\n\t}\n\treturn filepath.Join(filepath.Dir(dbPath), \".app_secret_key\")\n}\n\n// EnsureAppSecretKey guarantees a non-default secret key.\n// If the configured key is default/empty, it loads from the persisted key file,\n// or generates and stores a new key.\nfunc EnsureAppSecretKey(cfg *Config) (SecretKeySource, error) {\n\tif cfg == nil {\n\t\treturn SecretKeySourceConfigured, fmt.Errorf(\"nil config\")\n\t}\n\n\tcurrent := strings.TrimSpace(cfg.AppSecretKey)\n\tif current != \"\" && current != DefaultAppSecretKey {\n\t\treturn SecretKeySourceConfigured, nil\n\t}\n\n\tsecretPath := AppSecretKeyPath(cfg.DatabasePath)\n\tif data, err := os.ReadFile(secretPath); err == nil {\n\t\tloaded := strings.TrimSpace(string(data))\n\t\tif loaded == \"\" {\n\t\t\treturn SecretKeySourceFile, fmt.Errorf(\"empty app secret key file: %s\", secretPath)\n\t\t}\n\t\tcfg.AppSecretKey = loaded\n\t\treturn SecretKeySourceFile, nil\n\t} else if !os.IsNotExist(err) {\n\t\treturn SecretKeySourceFile, fmt.Errorf(\"read app secret key file: %w\", err)\n\t}\n\n\tsecret, err := generateRandomSecret(48)\n\tif err != nil {\n\t\treturn SecretKeySourceGenerated, 
err\n\t}\n\n\tif err := os.MkdirAll(filepath.Dir(secretPath), 0700); err != nil {\n\t\treturn SecretKeySourceGenerated, fmt.Errorf(\"create secret key directory: %w\", err)\n\t}\n\tif err := os.WriteFile(secretPath, []byte(secret+\"\\n\"), 0600); err != nil {\n\t\treturn SecretKeySourceGenerated, fmt.Errorf(\"write app secret key file: %w\", err)\n\t}\n\n\tcfg.AppSecretKey = secret\n\treturn SecretKeySourceGenerated, nil\n}\n\nfunc generateRandomSecret(size int) (string, error) {\n\tif size <= 0 {\n\t\tsize = 48\n\t}\n\tbuf := make([]byte, size)\n\tif _, err := rand.Read(buf); err != nil {\n\t\treturn \"\", fmt.Errorf(\"generate random app secret key: %w\", err)\n\t}\n\treturn base64.RawStdEncoding.EncodeToString(buf), nil\n}\n"
  },
  {
    "path": "internal/config/secret_test.go",
    "content": "package config\n\nimport (\n\t\"path/filepath\"\n\t\"testing\"\n)\n\nfunc TestEnsureAppSecretKeyConfigured(t *testing.T) {\n\tcfg := &Config{\n\t\tDatabasePath: \"./data/ch-ui.db\",\n\t\tAppSecretKey: \"already-configured-secret\",\n\t}\n\n\tsource, err := EnsureAppSecretKey(cfg)\n\tif err != nil {\n\t\tt.Fatalf(\"EnsureAppSecretKey returned error: %v\", err)\n\t}\n\tif source != SecretKeySourceConfigured {\n\t\tt.Fatalf(\"unexpected source: got %s want %s\", source, SecretKeySourceConfigured)\n\t}\n\tif cfg.AppSecretKey != \"already-configured-secret\" {\n\t\tt.Fatalf(\"configured secret should be preserved\")\n\t}\n}\n\nfunc TestEnsureAppSecretKeyGenerateAndReload(t *testing.T) {\n\ttmp := t.TempDir()\n\tdbPath := filepath.Join(tmp, \"data\", \"ch-ui.db\")\n\n\tcfg := &Config{\n\t\tDatabasePath: dbPath,\n\t\tAppSecretKey: DefaultAppSecretKey,\n\t}\n\n\tsource, err := EnsureAppSecretKey(cfg)\n\tif err != nil {\n\t\tt.Fatalf(\"EnsureAppSecretKey returned error: %v\", err)\n\t}\n\tif source != SecretKeySourceGenerated {\n\t\tt.Fatalf(\"unexpected source on first run: got %s want %s\", source, SecretKeySourceGenerated)\n\t}\n\tif cfg.AppSecretKey == \"\" || cfg.AppSecretKey == DefaultAppSecretKey {\n\t\tt.Fatalf(\"generated secret should be non-empty and non-default\")\n\t}\n\n\tfirst := cfg.AppSecretKey\n\tcfgReload := &Config{\n\t\tDatabasePath: dbPath,\n\t\tAppSecretKey: DefaultAppSecretKey,\n\t}\n\n\tsource, err = EnsureAppSecretKey(cfgReload)\n\tif err != nil {\n\t\tt.Fatalf(\"EnsureAppSecretKey reload returned error: %v\", err)\n\t}\n\tif source != SecretKeySourceFile {\n\t\tt.Fatalf(\"unexpected source on reload: got %s want %s\", source, SecretKeySourceFile)\n\t}\n\tif cfgReload.AppSecretKey != first {\n\t\tt.Fatalf(\"reloaded secret mismatch\")\n\t}\n}\n"
  },
  {
    "path": "internal/crypto/aes.go",
    "content": "package crypto\n\nimport (\n\t\"crypto/aes\"\n\t\"crypto/cipher\"\n\t\"crypto/rand\"\n\t\"encoding/hex\"\n\t\"fmt\"\n\t\"strings\"\n\n\t\"golang.org/x/crypto/scrypt\"\n)\n\n// deriveKey derives a 32-byte AES key from the secret using scrypt.\n// Parameters match Node.js crypto.scryptSync(secret, 'salt', 32) defaults: N=16384, r=8, p=1.\n//\n// SECURITY NOTE: The static salt weakens key derivation — all installations using\n// the same AppSecretKey will produce the same derived key. A future version should\n// use a per-installation salt (stored alongside the DB) with a data migration path.\nfunc deriveKey(secret string) ([]byte, error) {\n\treturn scrypt.Key([]byte(secret), []byte(\"salt\"), 16384, 8, 1, 32)\n}\n\n// Encrypt encrypts plaintext using AES-256-GCM with a 16-byte nonce.\n// Returns format: iv:authTag:encrypted (hex-encoded).\nfunc Encrypt(plaintext, secret string) (string, error) {\n\tkey, err := deriveKey(secret)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"key derivation failed: %w\", err)\n\t}\n\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"cipher creation failed: %w\", err)\n\t}\n\n\t// Use 16-byte nonce to match Node.js createCipheriv('aes-256-gcm', key, iv) with 16-byte IV\n\tgcm, err := cipher.NewGCMWithNonceSize(block, 16)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"GCM creation failed: %w\", err)\n\t}\n\n\tiv := make([]byte, 16)\n\tif _, err := rand.Read(iv); err != nil {\n\t\treturn \"\", fmt.Errorf(\"random IV generation failed: %w\", err)\n\t}\n\n\t// Seal appends ciphertext + tag\n\tsealed := gcm.Seal(nil, iv, []byte(plaintext), nil)\n\n\t// Split sealed into encrypted data and auth tag (last 16 bytes)\n\ttagSize := gcm.Overhead()\n\tencrypted := sealed[:len(sealed)-tagSize]\n\tauthTag := sealed[len(sealed)-tagSize:]\n\n\treturn fmt.Sprintf(\"%s:%s:%s\",\n\t\thex.EncodeToString(iv),\n\t\thex.EncodeToString(authTag),\n\t\thex.EncodeToString(encrypted),\n\t), nil\n}\n\n// 
Decrypt decrypts a string in format iv:authTag:encrypted using AES-256-GCM.\nfunc Decrypt(encryptedStr, secret string) (string, error) {\n\tparts := strings.Split(encryptedStr, \":\")\n\tif len(parts) != 3 {\n\t\treturn \"\", fmt.Errorf(\"invalid encrypted format: expected 3 parts, got %d\", len(parts))\n\t}\n\n\tiv, err := hex.DecodeString(parts[0])\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"invalid IV hex: %w\", err)\n\t}\n\n\tauthTag, err := hex.DecodeString(parts[1])\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"invalid auth tag hex: %w\", err)\n\t}\n\n\tencrypted, err := hex.DecodeString(parts[2])\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"invalid encrypted data hex: %w\", err)\n\t}\n\n\tkey, err := deriveKey(secret)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"key derivation failed: %w\", err)\n\t}\n\n\tblock, err := aes.NewCipher(key)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"cipher creation failed: %w\", err)\n\t}\n\n\tgcm, err := cipher.NewGCMWithNonceSize(block, 16)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"GCM creation failed: %w\", err)\n\t}\n\n\t// Go expects ciphertext + tag concatenated\n\tciphertext := append(encrypted, authTag...)\n\n\tplaintext, err := gcm.Open(nil, iv, ciphertext, nil)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"decryption failed: %w\", err)\n\t}\n\n\treturn string(plaintext), nil\n}\n\n// IsEncrypted checks if a string appears to be in the encrypted format (iv:authTag:encrypted).\nfunc IsEncrypted(value string) bool {\n\tparts := strings.Split(value, \":\")\n\treturn len(parts) == 3 &&\n\t\tlen(parts[0]) == 32 && // IV: 16 bytes = 32 hex chars\n\t\tlen(parts[1]) == 32 && // Auth tag: 16 bytes = 32 hex chars\n\t\tlen(parts[2]) > 0 // Encrypted data exists\n}\n"
  },
  {
    "path": "internal/database/alert_digests.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\ntype AlertRouteDigestWithDetails struct {\n\tID                               string  `json:\"id\"`\n\tRouteID                          string  `json:\"route_id\"`\n\tRuleID                           string  `json:\"rule_id\"`\n\tChannelID                        string  `json:\"channel_id\"`\n\tBucketStart                      string  `json:\"bucket_start\"`\n\tBucketEnd                        string  `json:\"bucket_end\"`\n\tEventType                        string  `json:\"event_type\"`\n\tSeverity                         string  `json:\"severity\"`\n\tEventCount                       int     `json:\"event_count\"`\n\tEventIDsJSON                     string  `json:\"event_ids_json\"`\n\tTitlesJSON                       string  `json:\"titles_json\"`\n\tStatus                           string  `json:\"status\"`\n\tAttemptCount                     int     `json:\"attempt_count\"`\n\tMaxAttempts                      int     `json:\"max_attempts\"`\n\tNextAttemptAt                    string  `json:\"next_attempt_at\"`\n\tLastError                        *string `json:\"last_error\"`\n\tCreatedAt                        string  `json:\"created_at\"`\n\tUpdatedAt                        string  `json:\"updated_at\"`\n\tSentAt                           *string `json:\"sent_at\"`\n\tRouteRecipientsJSON              string  `json:\"route_recipients_json\"`\n\tEscalationChannelID              *string `json:\"escalation_channel_id\"`\n\tEscalationRecipientsJSON         *string `json:\"escalation_recipients_json\"`\n\tEscalationAfterFailures          int     `json:\"escalation_after_failures\"`\n\tChannelName                      string  `json:\"channel_name\"`\n\tChannelType                      string  `json:\"channel_type\"`\n\tChannelConfigEncrypted           string  
`json:\"channel_config_encrypted\"`\n\tEscalationChannelName            *string `json:\"escalation_channel_name\"`\n\tEscalationChannelType            *string `json:\"escalation_channel_type\"`\n\tEscalationChannelConfigEncrypted *string `json:\"escalation_channel_config_encrypted\"`\n}\n\nfunc (db *DB) UpsertAlertRouteDigest(rule AlertRule, route AlertRuleRouteView, event AlertEvent, now time.Time) error {\n\twindowMins := route.DigestWindowMinutes\n\tif windowMins <= 0 {\n\t\twindowMins = 15\n\t}\n\twindow := time.Duration(windowMins) * time.Minute\n\tbucketStart := now.UTC().Truncate(window)\n\tbucketEnd := bucketStart.Add(window)\n\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin digest upsert: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tvar (\n\t\tdigestID     string\n\t\teventCount   int\n\t\teventIDsJSON string\n\t\ttitlesJSON   string\n\t)\n\terr = tx.QueryRow(\n\t\t`SELECT id, event_count, event_ids_json, titles_json\n\t\t FROM alert_route_digests\n\t\t WHERE route_id = ? AND bucket_start = ? AND event_type = ? 
AND severity = ?`,\n\t\troute.ID, bucketStart.Format(time.RFC3339), event.EventType, event.Severity,\n\t).Scan(&digestID, &eventCount, &eventIDsJSON, &titlesJSON)\n\tswitch err {\n\tcase nil:\n\t\tids := parseDigestStringArray(eventIDsJSON)\n\t\tif len(ids) < 200 {\n\t\t\tids = append(ids, event.ID)\n\t\t}\n\t\ttitles := parseDigestStringArray(titlesJSON)\n\t\tif len(titles) < 30 {\n\t\t\ttitles = append(titles, strings.TrimSpace(event.Title))\n\t\t}\n\t\tidsPayload, _ := json.Marshal(ids)\n\t\ttitlesPayload, _ := json.Marshal(titles)\n\t\tif _, err := tx.Exec(\n\t\t\t`UPDATE alert_route_digests\n\t\t\t SET event_count = ?,\n\t\t\t     event_ids_json = ?,\n\t\t\t     titles_json = ?,\n\t\t\t     updated_at = ?\n\t\t\t WHERE id = ?`,\n\t\t\teventCount+1,\n\t\t\tstring(idsPayload),\n\t\t\tstring(titlesPayload),\n\t\t\tnow.UTC().Format(time.RFC3339),\n\t\t\tdigestID,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"update digest batch: %w\", err)\n\t\t}\n\tcase sql.ErrNoRows:\n\t\tdigestID = uuid.NewString()\n\t\tmaxAttempts := rule.MaxAttempts\n\t\tif maxAttempts <= 0 {\n\t\t\tmaxAttempts = 5\n\t\t}\n\t\tidsPayload, _ := json.Marshal([]string{event.ID})\n\t\ttitlesPayload, _ := json.Marshal([]string{strings.TrimSpace(event.Title)})\n\t\tif _, err := tx.Exec(\n\t\t\t`INSERT INTO alert_route_digests\n\t\t\t (id, route_id, rule_id, channel_id, bucket_start, bucket_end, event_type, severity, event_count, event_ids_json, titles_json, status, attempt_count, max_attempts, next_attempt_at, created_at, updated_at)\n\t\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, 1, ?, ?, 'collecting', 0, ?, ?, ?, 
?)`,\n\t\t\tdigestID,\n\t\t\troute.ID,\n\t\t\trule.ID,\n\t\t\troute.ChannelID,\n\t\t\tbucketStart.Format(time.RFC3339),\n\t\t\tbucketEnd.Format(time.RFC3339),\n\t\t\tevent.EventType,\n\t\t\tevent.Severity,\n\t\t\tstring(idsPayload),\n\t\t\tstring(titlesPayload),\n\t\t\tmaxAttempts,\n\t\t\tbucketEnd.Format(time.RFC3339),\n\t\t\tnow.UTC().Format(time.RFC3339),\n\t\t\tnow.UTC().Format(time.RFC3339),\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"insert digest batch: %w\", err)\n\t\t}\n\tdefault:\n\t\treturn fmt.Errorf(\"load digest batch: %w\", err)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit digest upsert: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) ListDueAlertRouteDigests(limit int) ([]AlertRouteDigestWithDetails, error) {\n\tif limit <= 0 {\n\t\tlimit = 20\n\t}\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\trows, err := db.conn.Query(\n\t\t`SELECT\n\t\t\td.id, d.route_id, d.rule_id, d.channel_id, d.bucket_start, d.bucket_end, d.event_type, d.severity, d.event_count,\n\t\t\td.event_ids_json, d.titles_json, d.status, d.attempt_count, d.max_attempts, d.next_attempt_at, d.last_error, d.created_at, d.updated_at, d.sent_at,\n\t\t\trr.recipients_json,\n\t\t\trp.escalation_channel_id, rp.escalation_recipients_json, COALESCE(rp.escalation_after_failures, 0),\n\t\t\tc.name, c.channel_type, c.config_encrypted,\n\t\t\tec.name, ec.channel_type, ec.config_encrypted\n\t\t FROM alert_route_digests d\n\t\t JOIN alert_rule_routes rr ON rr.id = d.route_id\n\t\t LEFT JOIN alert_route_policies rp ON rp.route_id = rr.id\n\t\t JOIN alert_channels c ON c.id = d.channel_id\n\t\t LEFT JOIN alert_channels ec ON ec.id = rp.escalation_channel_id\n\t\t WHERE d.status IN ('collecting', 'retrying')\n\t\t   AND d.bucket_end <= ?\n\t\t   AND d.next_attempt_at <= ?\n\t\t   AND d.attempt_count < d.max_attempts\n\t\t ORDER BY d.bucket_end ASC\n\t\t LIMIT ?`,\n\t\tnow, now, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list due alert route 
digests: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertRouteDigestWithDetails, 0)\n\tfor rows.Next() {\n\t\tvar item AlertRouteDigestWithDetails\n\t\tvar lastError, sentAt sql.NullString\n\t\tvar escalationChannelID, escalationRecipientsJSON, escalationChannelName, escalationChannelType, escalationChannelConfig sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&item.ID, &item.RouteID, &item.RuleID, &item.ChannelID, &item.BucketStart, &item.BucketEnd, &item.EventType, &item.Severity, &item.EventCount,\n\t\t\t&item.EventIDsJSON, &item.TitlesJSON, &item.Status, &item.AttemptCount, &item.MaxAttempts, &item.NextAttemptAt, &lastError, &item.CreatedAt, &item.UpdatedAt, &sentAt,\n\t\t\t&item.RouteRecipientsJSON,\n\t\t\t&escalationChannelID, &escalationRecipientsJSON, &item.EscalationAfterFailures,\n\t\t\t&item.ChannelName, &item.ChannelType, &item.ChannelConfigEncrypted,\n\t\t\t&escalationChannelName, &escalationChannelType, &escalationChannelConfig,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan due alert route digest: %w\", err)\n\t\t}\n\t\titem.LastError = nullStringToPtr(lastError)\n\t\titem.SentAt = nullStringToPtr(sentAt)\n\t\titem.EscalationChannelID = nullStringToPtr(escalationChannelID)\n\t\titem.EscalationRecipientsJSON = nullStringToPtr(escalationRecipientsJSON)\n\t\titem.EscalationChannelName = nullStringToPtr(escalationChannelName)\n\t\titem.EscalationChannelType = nullStringToPtr(escalationChannelType)\n\t\titem.EscalationChannelConfigEncrypted = nullStringToPtr(escalationChannelConfig)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate due alert route digests: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) MarkAlertRouteDigestSending(id string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_route_digests\n\t\t SET status = 'sending',\n\t\t     attempt_count = attempt_count + 1,\n\t\t     updated_at = ?\n\t\t 
WHERE id = ?`,\n\t\tnow, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark digest sending: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) MarkAlertRouteDigestSent(id string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_route_digests\n\t\t SET status = 'sent',\n\t\t     sent_at = ?,\n\t\t     updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnow, now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark digest sent: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) MarkAlertRouteDigestRetry(id string, nextAttemptAt time.Time, lastError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_route_digests\n\t\t SET status = 'retrying',\n\t\t     next_attempt_at = ?,\n\t\t     last_error = ?,\n\t\t     updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnextAttemptAt.UTC().Format(time.RFC3339),\n\t\tnullableString(lastError),\n\t\tnow,\n\t\tid,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark digest retry: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) MarkAlertRouteDigestFailed(id string, lastError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_route_digests\n\t\t SET status = 'failed',\n\t\t     last_error = ?,\n\t\t     updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnullableString(lastError),\n\t\tnow,\n\t\tid,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark digest failed: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc parseDigestStringArray(raw string) []string {\n\tif strings.TrimSpace(raw) == \"\" {\n\t\treturn []string{}\n\t}\n\tvar values []string\n\tif err := json.Unmarshal([]byte(raw), &values); err != nil {\n\t\treturn []string{}\n\t}\n\tout := make([]string, 0, len(values))\n\tfor _, v := range values {\n\t\tv = strings.TrimSpace(v)\n\t\tif v != \"\" {\n\t\t\tout = append(out, v)\n\t\t}\n\t}\n\treturn out\n}\n"
  },
  {
    "path": "internal/database/alerts.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\ntype AlertChannel struct {\n\tID              string  `json:\"id\"`\n\tName            string  `json:\"name\"`\n\tChannelType     string  `json:\"channel_type\"`\n\tConfigEncrypted string  `json:\"-\"`\n\tIsActive        bool    `json:\"is_active\"`\n\tCreatedBy       *string `json:\"created_by\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\ntype AlertRule struct {\n\tID              string  `json:\"id\"`\n\tName            string  `json:\"name\"`\n\tEventType       string  `json:\"event_type\"`\n\tSeverityMin     string  `json:\"severity_min\"`\n\tEnabled         bool    `json:\"enabled\"`\n\tCooldownSeconds int     `json:\"cooldown_seconds\"`\n\tMaxAttempts     int     `json:\"max_attempts\"`\n\tSubjectTemplate *string `json:\"subject_template\"`\n\tBodyTemplate    *string `json:\"body_template\"`\n\tCreatedBy       *string `json:\"created_by\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\ntype AlertRuleRoute struct {\n\tID                       string   `json:\"id\"`\n\tRuleID                   string   `json:\"rule_id\"`\n\tChannelID                string   `json:\"channel_id\"`\n\tRecipients               []string `json:\"recipients\"`\n\tRecipientsJSON           string   `json:\"-\"`\n\tIsActive                 bool     `json:\"is_active\"`\n\tDeliveryMode             string   `json:\"delivery_mode\"`\n\tDigestWindowMinutes      int      `json:\"digest_window_minutes\"`\n\tEscalationChannelID      *string  `json:\"escalation_channel_id\"`\n\tEscalationRecipients     []string `json:\"escalation_recipients\"`\n\tEscalationRecipientsJSON string   `json:\"-\"`\n\tEscalationAfterFailures  int      `json:\"escalation_after_failures\"`\n\tCreatedAt                string   
`json:\"created_at\"`\n\tUpdatedAt                string   `json:\"updated_at\"`\n}\n\ntype AlertRuleRouteView struct {\n\tAlertRuleRoute\n\tChannelName           string  `json:\"channel_name\"`\n\tChannelType           string  `json:\"channel_type\"`\n\tEscalationChannelName *string `json:\"escalation_channel_name\"`\n\tEscalationChannelType *string `json:\"escalation_channel_type\"`\n}\n\ntype AlertEvent struct {\n\tID           string  `json:\"id\"`\n\tConnectionID *string `json:\"connection_id\"`\n\tEventType    string  `json:\"event_type\"`\n\tSeverity     string  `json:\"severity\"`\n\tTitle        string  `json:\"title\"`\n\tMessage      string  `json:\"message\"`\n\tPayloadJSON  *string `json:\"payload_json\"`\n\tFingerprint  *string `json:\"fingerprint\"`\n\tSourceRef    *string `json:\"source_ref\"`\n\tStatus       string  `json:\"status\"`\n\tCreatedAt    string  `json:\"created_at\"`\n\tProcessedAt  *string `json:\"processed_at\"`\n}\n\ntype AlertDispatchJob struct {\n\tID                string  `json:\"id\"`\n\tEventID           string  `json:\"event_id\"`\n\tRuleID            string  `json:\"rule_id\"`\n\tRouteID           string  `json:\"route_id\"`\n\tChannelID         string  `json:\"channel_id\"`\n\tStatus            string  `json:\"status\"`\n\tAttemptCount      int     `json:\"attempt_count\"`\n\tMaxAttempts       int     `json:\"max_attempts\"`\n\tNextAttemptAt     string  `json:\"next_attempt_at\"`\n\tLastError         *string `json:\"last_error\"`\n\tProviderMessageID *string `json:\"provider_message_id\"`\n\tCreatedAt         string  `json:\"created_at\"`\n\tUpdatedAt         string  `json:\"updated_at\"`\n\tSentAt            *string `json:\"sent_at\"`\n}\n\ntype AlertDispatchJobWithDetails struct {\n\tAlertDispatchJob\n\tEventType                        string  `json:\"event_type\"`\n\tEventSeverity                    string  `json:\"event_severity\"`\n\tEventTitle                       string  `json:\"event_title\"`\n\tEventMessage         
            string  `json:\"event_message\"`\n\tEventPayloadJSON                 *string `json:\"event_payload_json\"`\n\tEventFingerprint                 *string `json:\"event_fingerprint\"`\n\tRuleName                         string  `json:\"rule_name\"`\n\tRuleCooldownSeconds              int     `json:\"rule_cooldown_seconds\"`\n\tRuleSubjectTemplate              *string `json:\"rule_subject_template\"`\n\tRuleBodyTemplate                 *string `json:\"rule_body_template\"`\n\tRouteRecipientsJSON              string  `json:\"route_recipients_json\"`\n\tRouteDeliveryMode                string  `json:\"route_delivery_mode\"`\n\tRouteDigestWindowMins            int     `json:\"route_digest_window_minutes\"`\n\tRouteEscalationChannelID         *string `json:\"route_escalation_channel_id\"`\n\tRouteEscalationRecipientsJSON    *string `json:\"route_escalation_recipients_json\"`\n\tRouteEscalationAfterFailures     int     `json:\"route_escalation_after_failures\"`\n\tChannelName                      string  `json:\"channel_name\"`\n\tChannelType                      string  `json:\"channel_type\"`\n\tChannelConfigEncrypted           string  `json:\"channel_config_encrypted\"`\n\tEscalationChannelName            *string `json:\"escalation_channel_name\"`\n\tEscalationChannelType            *string `json:\"escalation_channel_type\"`\n\tEscalationChannelConfigEncrypted *string `json:\"escalation_channel_config_encrypted\"`\n}\n\nfunc (db *DB) CreateAlertChannel(name, channelType, encryptedConfig string, isActive bool, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO alert_channels (id, name, channel_type, config_encrypted, is_active, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, strings.TrimSpace(name), strings.TrimSpace(channelType), encryptedConfig, boolToInt(isActive), nullableString(createdBy), now, now,\n\t)\n\tif err != 
nil {\n\t\treturn \"\", fmt.Errorf(\"create alert channel: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc (db *DB) UpdateAlertChannel(id, name, channelType string, encryptedConfig *string, isActive bool) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif encryptedConfig != nil {\n\t\tif _, err := db.conn.Exec(\n\t\t\t`UPDATE alert_channels\n\t\t\t SET name = ?, channel_type = ?, config_encrypted = ?, is_active = ?, updated_at = ?\n\t\t\t WHERE id = ?`,\n\t\t\tstrings.TrimSpace(name), strings.TrimSpace(channelType), *encryptedConfig, boolToInt(isActive), now, id,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"update alert channel: %w\", err)\n\t\t}\n\t\treturn nil\n\t}\n\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_channels\n\t\t SET name = ?, channel_type = ?, is_active = ?, updated_at = ?\n\t\t WHERE id = ?`,\n\t\tstrings.TrimSpace(name), strings.TrimSpace(channelType), boolToInt(isActive), now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"update alert channel: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) DeleteAlertChannel(id string) error {\n\tif _, err := db.conn.Exec(`DELETE FROM alert_channels WHERE id = ?`, id); err != nil {\n\t\treturn fmt.Errorf(\"delete alert channel: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) GetAlertChannelByID(id string) (*AlertChannel, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, channel_type, config_encrypted, is_active, created_by, created_at, updated_at\n\t\t FROM alert_channels WHERE id = ?`,\n\t\tid,\n\t)\n\treturn scanAlertChannelRow(row)\n}\n\nfunc (db *DB) ListAlertChannels() ([]AlertChannel, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, channel_type, config_encrypted, is_active, created_by, created_at, updated_at\n\t\t FROM alert_channels\n\t\t ORDER BY created_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list alert channels: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertChannel, 0)\n\tfor rows.Next() {\n\t\tchannel, err := 
scanAlertChannel(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout = append(out, channel)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate alert channels: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) CreateAlertRule(name, eventType, severityMin string, enabled bool, cooldownSeconds, maxAttempts int, subjectTemplate, bodyTemplate, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif cooldownSeconds < 0 {\n\t\tcooldownSeconds = 0\n\t}\n\tif maxAttempts <= 0 {\n\t\tmaxAttempts = 5\n\t}\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO alert_rules (id, name, event_type, severity_min, enabled, cooldown_seconds, max_attempts, subject_template, body_template, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, strings.TrimSpace(name), strings.TrimSpace(eventType), strings.TrimSpace(severityMin), boolToInt(enabled),\n\t\tcooldownSeconds, maxAttempts, nullableString(subjectTemplate), nullableString(bodyTemplate), nullableString(createdBy), now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create alert rule: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc (db *DB) UpdateAlertRule(id, name, eventType, severityMin string, enabled bool, cooldownSeconds, maxAttempts int, subjectTemplate, bodyTemplate string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif cooldownSeconds < 0 {\n\t\tcooldownSeconds = 0\n\t}\n\tif maxAttempts <= 0 {\n\t\tmaxAttempts = 5\n\t}\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_rules\n\t\t SET name = ?, event_type = ?, severity_min = ?, enabled = ?, cooldown_seconds = ?, max_attempts = ?, subject_template = ?, body_template = ?, updated_at = ?\n\t\t WHERE id = ?`,\n\t\tstrings.TrimSpace(name), strings.TrimSpace(eventType), strings.TrimSpace(severityMin), boolToInt(enabled),\n\t\tcooldownSeconds, maxAttempts, nullableString(subjectTemplate), 
nullableString(bodyTemplate), now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"update alert rule: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) DeleteAlertRule(id string) error {\n\tif _, err := db.conn.Exec(`DELETE FROM alert_rules WHERE id = ?`, id); err != nil {\n\t\treturn fmt.Errorf(\"delete alert rule: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) GetAlertRuleByID(id string) (*AlertRule, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, event_type, severity_min, enabled, cooldown_seconds, max_attempts, subject_template, body_template, created_by, created_at, updated_at\n\t\t FROM alert_rules WHERE id = ?`,\n\t\tid,\n\t)\n\treturn scanAlertRuleRow(row)\n}\n\nfunc (db *DB) ListAlertRules() ([]AlertRule, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, event_type, severity_min, enabled, cooldown_seconds, max_attempts, subject_template, body_template, created_by, created_at, updated_at\n\t\t FROM alert_rules\n\t\t ORDER BY created_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list alert rules: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertRule, 0)\n\tfor rows.Next() {\n\t\trule, err := scanAlertRule(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout = append(out, rule)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate alert rules: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) ListEnabledAlertRules() ([]AlertRule, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, event_type, severity_min, enabled, cooldown_seconds, max_attempts, subject_template, body_template, created_by, created_at, updated_at\n\t\t FROM alert_rules\n\t\t WHERE enabled = 1\n\t\t ORDER BY created_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list enabled alert rules: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertRule, 0)\n\tfor rows.Next() {\n\t\trule, err := scanAlertRule(rows)\n\t\tif err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t\tout = append(out, rule)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate enabled alert rules: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) ReplaceAlertRuleRoutes(ruleID string, routes []AlertRuleRoute) error {\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin replace alert routes: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tif _, err := tx.Exec(`DELETE FROM alert_rule_routes WHERE rule_id = ?`, ruleID); err != nil {\n\t\treturn fmt.Errorf(\"clear alert routes: %w\", err)\n\t}\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tfor _, route := range routes {\n\t\trecipientsJSON, err := json.Marshal(route.Recipients)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"marshal route recipients: %w\", err)\n\t\t}\n\t\tid := route.ID\n\t\tif strings.TrimSpace(id) == \"\" {\n\t\t\tid = uuid.NewString()\n\t\t}\n\t\tif _, err := tx.Exec(\n\t\t\t`INSERT INTO alert_rule_routes (id, rule_id, channel_id, recipients_json, is_active, created_at, updated_at)\n\t\t\t VALUES (?, ?, ?, ?, ?, ?, ?)`,\n\t\t\tid, ruleID, route.ChannelID, string(recipientsJSON), boolToInt(route.IsActive), now, now,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"insert alert route: %w\", err)\n\t\t}\n\n\t\tdeliveryMode := strings.ToLower(strings.TrimSpace(route.DeliveryMode))\n\t\tif deliveryMode != \"digest\" {\n\t\t\tdeliveryMode = \"immediate\"\n\t\t}\n\t\tdigestWindow := route.DigestWindowMinutes\n\t\tif digestWindow < 0 {\n\t\t\tdigestWindow = 0\n\t\t}\n\t\tescalationAfter := route.EscalationAfterFailures\n\t\tif escalationAfter < 0 {\n\t\t\tescalationAfter = 0\n\t\t}\n\t\tvar escalationChannelID interface{}\n\t\tif route.EscalationChannelID != nil && strings.TrimSpace(*route.EscalationChannelID) != \"\" {\n\t\t\tescalationChannelID = strings.TrimSpace(*route.EscalationChannelID)\n\t\t}\n\t\tvar escalationRecipients interface{}\n\t\tif len(route.EscalationRecipients) > 0 {\n\t\t\tpayload, err := 
json.Marshal(route.EscalationRecipients)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"marshal escalation recipients: %w\", err)\n\t\t\t}\n\t\t\tescalationRecipients = string(payload)\n\t\t}\n\t\tif _, err := tx.Exec(\n\t\t\t`INSERT INTO alert_route_policies\n\t\t\t (route_id, delivery_mode, digest_window_minutes, escalation_channel_id, escalation_recipients_json, escalation_after_failures, created_at, updated_at)\n\t\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n\t\t\t ON CONFLICT(route_id) DO UPDATE SET\n\t\t\t   delivery_mode = excluded.delivery_mode,\n\t\t\t   digest_window_minutes = excluded.digest_window_minutes,\n\t\t\t   escalation_channel_id = excluded.escalation_channel_id,\n\t\t\t   escalation_recipients_json = excluded.escalation_recipients_json,\n\t\t\t   escalation_after_failures = excluded.escalation_after_failures,\n\t\t\t   updated_at = excluded.updated_at`,\n\t\t\tid, deliveryMode, digestWindow, escalationChannelID, escalationRecipients, escalationAfter, now, now,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"upsert alert route policy: %w\", err)\n\t\t}\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit replace alert routes: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) ListAlertRuleRoutes(ruleID string) ([]AlertRuleRouteView, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT rr.id, rr.rule_id, rr.channel_id, rr.recipients_json, rr.is_active, rr.created_at, rr.updated_at, c.name, c.channel_type,\n\t\t        COALESCE(rp.delivery_mode, 'immediate'),\n\t\t\t\tCOALESCE(rp.digest_window_minutes, 0),\n\t\t\t\trp.escalation_channel_id,\n\t\t\t\trp.escalation_recipients_json,\n\t\t\t\tCOALESCE(rp.escalation_after_failures, 0),\n\t\t\t\tec.name,\n\t\t\t\tec.channel_type\n\t\t FROM alert_rule_routes rr\n\t\t JOIN alert_channels c ON c.id = rr.channel_id\n\t\t LEFT JOIN alert_route_policies rp ON rp.route_id = rr.id\n\t\t LEFT JOIN alert_channels ec ON ec.id = rp.escalation_channel_id\n\t\t WHERE rr.rule_id = ?\n\t\t ORDER 
BY rr.created_at ASC`,\n\t\truleID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list alert rule routes: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertRuleRouteView, 0)\n\tfor rows.Next() {\n\t\tvar item AlertRuleRouteView\n\t\tvar recipientsJSON string\n\t\tvar active int\n\t\tvar escalationChannelID, escalationRecipientsJSON, escalationChannelName, escalationChannelType sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&item.ID, &item.RuleID, &item.ChannelID, &recipientsJSON, &active, &item.CreatedAt, &item.UpdatedAt,\n\t\t\t&item.ChannelName, &item.ChannelType,\n\t\t\t&item.DeliveryMode, &item.DigestWindowMinutes, &escalationChannelID, &escalationRecipientsJSON, &item.EscalationAfterFailures,\n\t\t\t&escalationChannelName, &escalationChannelType,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan alert rule route: %w\", err)\n\t\t}\n\t\titem.IsActive = intToBool(active)\n\t\titem.RecipientsJSON = recipientsJSON\n\t\titem.Recipients = parseRecipientsJSON(recipientsJSON)\n\t\titem.EscalationChannelID = nullStringToPtr(escalationChannelID)\n\t\titem.EscalationRecipientsJSON = escalationRecipientsJSON.String\n\t\titem.EscalationRecipients = parseRecipientsJSON(escalationRecipientsJSON.String)\n\t\titem.EscalationChannelName = nullStringToPtr(escalationChannelName)\n\t\titem.EscalationChannelType = nullStringToPtr(escalationChannelType)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate alert rule routes: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) ListActiveAlertRuleRoutes(ruleID string) ([]AlertRuleRouteView, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT rr.id, rr.rule_id, rr.channel_id, rr.recipients_json, rr.is_active, rr.created_at, rr.updated_at, c.name, c.channel_type,\n\t\t        COALESCE(rp.delivery_mode, 'immediate'),\n\t\t\t\tCOALESCE(rp.digest_window_minutes, 
0),\n\t\t\t\trp.escalation_channel_id,\n\t\t\t\trp.escalation_recipients_json,\n\t\t\t\tCOALESCE(rp.escalation_after_failures, 0),\n\t\t\t\tec.name,\n\t\t\t\tec.channel_type\n\t\t FROM alert_rule_routes rr\n\t\t JOIN alert_channels c ON c.id = rr.channel_id\n\t\t LEFT JOIN alert_route_policies rp ON rp.route_id = rr.id\n\t\t LEFT JOIN alert_channels ec ON ec.id = rp.escalation_channel_id\n\t\t WHERE rr.rule_id = ? AND rr.is_active = 1 AND c.is_active = 1\n\t\t ORDER BY rr.created_at ASC`,\n\t\truleID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list active alert rule routes: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertRuleRouteView, 0)\n\tfor rows.Next() {\n\t\tvar item AlertRuleRouteView\n\t\tvar recipientsJSON string\n\t\tvar active int\n\t\tvar escalationChannelID, escalationRecipientsJSON, escalationChannelName, escalationChannelType sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&item.ID, &item.RuleID, &item.ChannelID, &recipientsJSON, &active, &item.CreatedAt, &item.UpdatedAt,\n\t\t\t&item.ChannelName, &item.ChannelType,\n\t\t\t&item.DeliveryMode, &item.DigestWindowMinutes, &escalationChannelID, &escalationRecipientsJSON, &item.EscalationAfterFailures,\n\t\t\t&escalationChannelName, &escalationChannelType,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan active alert route: %w\", err)\n\t\t}\n\t\titem.IsActive = intToBool(active)\n\t\titem.RecipientsJSON = recipientsJSON\n\t\titem.Recipients = parseRecipientsJSON(recipientsJSON)\n\t\titem.EscalationChannelID = nullStringToPtr(escalationChannelID)\n\t\titem.EscalationRecipientsJSON = escalationRecipientsJSON.String\n\t\titem.EscalationRecipients = parseRecipientsJSON(escalationRecipientsJSON.String)\n\t\titem.EscalationChannelName = nullStringToPtr(escalationChannelName)\n\t\titem.EscalationChannelType = nullStringToPtr(escalationChannelType)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate active alert routes: 
%w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) CreateAlertEvent(connectionID *string, eventType, severity, title, message string, payload interface{}, fingerprint, sourceRef string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar payloadJSON interface{}\n\tif payload != nil {\n\t\tdata, err := json.Marshal(payload)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"marshal alert payload: %w\", err)\n\t\t}\n\t\tpayloadJSON = string(data)\n\t}\n\n\tvar connectionVal interface{}\n\tif connectionID != nil && strings.TrimSpace(*connectionID) != \"\" {\n\t\tconnectionVal = strings.TrimSpace(*connectionID)\n\t}\n\n\tif _, err := db.conn.Exec(\n\t\t`INSERT INTO alert_events (id, connection_id, event_type, severity, title, message, payload_json, fingerprint, source_ref, status, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'new', ?)`,\n\t\tid, connectionVal, strings.TrimSpace(eventType), strings.TrimSpace(severity),\n\t\tstrings.TrimSpace(title), strings.TrimSpace(message), payloadJSON, nullableString(fingerprint), nullableString(sourceRef), now,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create alert event: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc (db *DB) ListAlertEvents(limit int, eventType, status string) ([]AlertEvent, error) {\n\tif limit <= 0 {\n\t\tlimit = 100\n\t}\n\tif limit > 1000 {\n\t\tlimit = 1000\n\t}\n\n\twhere := []string{\"1=1\"}\n\targs := make([]interface{}, 0, 4)\n\tif strings.TrimSpace(eventType) != \"\" {\n\t\twhere = append(where, \"event_type = ?\")\n\t\targs = append(args, strings.TrimSpace(eventType))\n\t}\n\tif strings.TrimSpace(status) != \"\" {\n\t\twhere = append(where, \"status = ?\")\n\t\targs = append(args, strings.TrimSpace(status))\n\t}\n\targs = append(args, limit)\n\n\tquery := fmt.Sprintf(\n\t\t`SELECT id, connection_id, event_type, severity, title, message, payload_json, fingerprint, source_ref, status, created_at, processed_at\n\t\t FROM 
alert_events\n\t\t WHERE %s\n\t\t ORDER BY created_at DESC\n\t\t LIMIT ?`, strings.Join(where, \" AND \"),\n\t)\n\n\trows, err := db.conn.Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list alert events: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertEvent, 0)\n\tfor rows.Next() {\n\t\tvar item AlertEvent\n\t\tvar connectionID, payloadJSON, fingerprint, sourceRef, processedAt sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&item.ID, &connectionID, &item.EventType, &item.Severity, &item.Title, &item.Message,\n\t\t\t&payloadJSON, &fingerprint, &sourceRef, &item.Status, &item.CreatedAt, &processedAt,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan alert event: %w\", err)\n\t\t}\n\t\titem.ConnectionID = nullStringToPtr(connectionID)\n\t\titem.PayloadJSON = nullStringToPtr(payloadJSON)\n\t\titem.Fingerprint = nullStringToPtr(fingerprint)\n\t\titem.SourceRef = nullStringToPtr(sourceRef)\n\t\titem.ProcessedAt = nullStringToPtr(processedAt)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate alert events: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) ListNewAlertEvents(limit int) ([]AlertEvent, error) {\n\tif limit <= 0 {\n\t\tlimit = 50\n\t}\n\n\trows, err := db.conn.Query(\n\t\t`SELECT id, connection_id, event_type, severity, title, message, payload_json, fingerprint, source_ref, status, created_at, processed_at\n\t\t FROM alert_events\n\t\t WHERE status = 'new'\n\t\t ORDER BY created_at ASC\n\t\t LIMIT ?`,\n\t\tlimit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list new alert events: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]AlertEvent, 0)\n\tfor rows.Next() {\n\t\tvar item AlertEvent\n\t\tvar connectionID, payloadJSON, fingerprint, sourceRef, processedAt sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&item.ID, &connectionID, &item.EventType, &item.Severity, &item.Title, &item.Message,\n\t\t\t&payloadJSON, &fingerprint, 
&sourceRef, &item.Status, &item.CreatedAt, &processedAt,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan new alert event: %w\", err)\n\t\t}\n\t\titem.ConnectionID = nullStringToPtr(connectionID)\n\t\titem.PayloadJSON = nullStringToPtr(payloadJSON)\n\t\titem.Fingerprint = nullStringToPtr(fingerprint)\n\t\titem.SourceRef = nullStringToPtr(sourceRef)\n\t\titem.ProcessedAt = nullStringToPtr(processedAt)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate new alert events: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) MarkAlertEventProcessed(id string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_events SET status = 'processed', processed_at = ? WHERE id = ?`,\n\t\tnow, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark alert event processed: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) HasRecentAlertDispatch(routeID, fingerprint string, since time.Time) (bool, error) {\n\tif strings.TrimSpace(fingerprint) == \"\" {\n\t\treturn false, nil\n\t}\n\tvar count int\n\tif err := db.conn.QueryRow(\n\t\t`SELECT COUNT(*)\n\t\t FROM alert_dispatch_jobs j\n\t\t JOIN alert_events e ON e.id = j.event_id\n\t\t WHERE j.route_id = ?\n\t\t   AND e.fingerprint = ?\n\t\t   AND e.created_at >= ?\n\t\t   AND j.status IN ('queued', 'retrying', 'sending', 'sent')`,\n\t\trouteID, fingerprint, since.UTC().Format(time.RFC3339),\n\t).Scan(&count); err != nil {\n\t\treturn false, fmt.Errorf(\"check recent alert dispatch: %w\", err)\n\t}\n\treturn count > 0, nil\n}\n\nfunc (db *DB) CreateAlertDispatchJob(eventID, ruleID, routeID, channelID string, maxAttempts int, nextAttemptAt time.Time) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif maxAttempts <= 0 {\n\t\tmaxAttempts = 5\n\t}\n\tif _, err := db.conn.Exec(\n\t\t`INSERT INTO alert_dispatch_jobs (id, event_id, rule_id, route_id, channel_id, status, 
attempt_count, max_attempts, next_attempt_at, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, 'queued', 0, ?, ?, ?, ?)`,\n\t\tid, eventID, ruleID, routeID, channelID, maxAttempts, nextAttemptAt.UTC().Format(time.RFC3339), now, now,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create alert dispatch job: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc (db *DB) ListDueAlertDispatchJobs(limit int) ([]AlertDispatchJobWithDetails, error) {\n\tif limit <= 0 {\n\t\tlimit = 20\n\t}\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\trows, err := db.conn.Query(\n\t\t`SELECT\n\t\t\tj.id, j.event_id, j.rule_id, j.route_id, j.channel_id, j.status, j.attempt_count, j.max_attempts, j.next_attempt_at, j.last_error, j.provider_message_id, j.created_at, j.updated_at, j.sent_at,\n\t\t\te.event_type, e.severity, e.title, e.message, e.payload_json, e.fingerprint,\n\t\t\tr.name, r.cooldown_seconds, r.subject_template, r.body_template,\n\t\t\trr.recipients_json,\n\t\t\tCOALESCE(rp.delivery_mode, 'immediate'),\n\t\t\tCOALESCE(rp.digest_window_minutes, 0),\n\t\t\trp.escalation_channel_id,\n\t\t\trp.escalation_recipients_json,\n\t\t\tCOALESCE(rp.escalation_after_failures, 0),\n\t\t\tc.name, c.channel_type, c.config_encrypted,\n\t\t\tec.name, ec.channel_type, ec.config_encrypted\n\t\t FROM alert_dispatch_jobs j\n\t\t JOIN alert_events e ON e.id = j.event_id\n\t\t JOIN alert_rules r ON r.id = j.rule_id\n\t\t JOIN alert_rule_routes rr ON rr.id = j.route_id\n\t\t LEFT JOIN alert_route_policies rp ON rp.route_id = rr.id\n\t\t JOIN alert_channels c ON c.id = j.channel_id\n\t\t LEFT JOIN alert_channels ec ON ec.id = rp.escalation_channel_id\n\t\t WHERE j.status IN ('queued', 'retrying')\n\t\t   AND j.attempt_count < j.max_attempts\n\t\t   AND j.next_attempt_at <= ?\n\t\t ORDER BY j.next_attempt_at ASC\n\t\t LIMIT ?`,\n\t\tnow, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list due alert dispatch jobs: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := 
make([]AlertDispatchJobWithDetails, 0)\n\tfor rows.Next() {\n\t\tvar item AlertDispatchJobWithDetails\n\t\tvar lastError, providerMessageID, sentAt sql.NullString\n\t\tvar eventPayloadJSON, eventFingerprint, subjectTemplate, bodyTemplate sql.NullString\n\t\tvar escalationChannelID, escalationRecipientsJSON, escalationChannelName, escalationChannelType, escalationChannelConfig sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&item.ID, &item.EventID, &item.RuleID, &item.RouteID, &item.ChannelID, &item.Status, &item.AttemptCount, &item.MaxAttempts, &item.NextAttemptAt, &lastError, &providerMessageID, &item.CreatedAt, &item.UpdatedAt, &sentAt,\n\t\t\t&item.EventType, &item.EventSeverity, &item.EventTitle, &item.EventMessage, &eventPayloadJSON, &eventFingerprint,\n\t\t\t&item.RuleName, &item.RuleCooldownSeconds, &subjectTemplate, &bodyTemplate,\n\t\t\t&item.RouteRecipientsJSON,\n\t\t\t&item.RouteDeliveryMode, &item.RouteDigestWindowMins, &escalationChannelID, &escalationRecipientsJSON, &item.RouteEscalationAfterFailures,\n\t\t\t&item.ChannelName, &item.ChannelType, &item.ChannelConfigEncrypted,\n\t\t\t&escalationChannelName, &escalationChannelType, &escalationChannelConfig,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan due alert dispatch job: %w\", err)\n\t\t}\n\t\titem.LastError = nullStringToPtr(lastError)\n\t\titem.ProviderMessageID = nullStringToPtr(providerMessageID)\n\t\titem.SentAt = nullStringToPtr(sentAt)\n\t\titem.EventPayloadJSON = nullStringToPtr(eventPayloadJSON)\n\t\titem.EventFingerprint = nullStringToPtr(eventFingerprint)\n\t\titem.RuleSubjectTemplate = nullStringToPtr(subjectTemplate)\n\t\titem.RuleBodyTemplate = nullStringToPtr(bodyTemplate)\n\t\titem.RouteEscalationChannelID = nullStringToPtr(escalationChannelID)\n\t\titem.RouteEscalationRecipientsJSON = nullStringToPtr(escalationRecipientsJSON)\n\t\titem.EscalationChannelName = nullStringToPtr(escalationChannelName)\n\t\titem.EscalationChannelType = 
nullStringToPtr(escalationChannelType)\n\t\titem.EscalationChannelConfigEncrypted = nullStringToPtr(escalationChannelConfig)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate due alert dispatch jobs: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (db *DB) MarkAlertDispatchJobSending(id string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_dispatch_jobs\n\t\t SET status = 'sending',\n\t\t     attempt_count = attempt_count + 1,\n\t\t     updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnow, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark alert dispatch sending: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) MarkAlertDispatchJobSent(id, providerMessageID string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_dispatch_jobs\n\t\t SET status = 'sent',\n\t\t     provider_message_id = ?,\n\t\t     sent_at = ?,\n\t\t     updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnullableString(providerMessageID), now, now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark alert dispatch sent: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) MarkAlertDispatchJobRetry(id string, nextAttemptAt time.Time, lastError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_dispatch_jobs\n\t\t SET status = 'retrying',\n\t\t     next_attempt_at = ?,\n\t\t     last_error = ?,\n\t\t     updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnextAttemptAt.UTC().Format(time.RFC3339), nullableString(lastError), now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark alert dispatch retry: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (db *DB) MarkAlertDispatchJobFailed(id, lastError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE alert_dispatch_jobs\n\t\t SET status = 'failed',\n\t\t     last_error = ?,\n\t\t     updated_at = 
?\n\t\t WHERE id = ?`,\n\t\tnullableString(lastError), now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"mark alert dispatch failed: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc scanAlertChannel(scanner interface {\n\tScan(dest ...interface{}) error\n}) (AlertChannel, error) {\n\tvar item AlertChannel\n\tvar configEncrypted, createdBy sql.NullString\n\tvar isActive int\n\tif err := scanner.Scan(\n\t\t&item.ID, &item.Name, &item.ChannelType, &configEncrypted, &isActive, &createdBy, &item.CreatedAt, &item.UpdatedAt,\n\t); err != nil {\n\t\treturn item, fmt.Errorf(\"scan alert channel: %w\", err)\n\t}\n\titem.ConfigEncrypted = configEncrypted.String\n\titem.IsActive = intToBool(isActive)\n\titem.CreatedBy = nullStringToPtr(createdBy)\n\treturn item, nil\n}\n\nfunc scanAlertChannelRow(row *sql.Row) (*AlertChannel, error) {\n\tvar item AlertChannel\n\tvar configEncrypted, createdBy sql.NullString\n\tvar isActive int\n\terr := row.Scan(\n\t\t&item.ID, &item.Name, &item.ChannelType, &configEncrypted, &isActive, &createdBy, &item.CreatedAt, &item.UpdatedAt,\n\t)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"scan alert channel row: %w\", err)\n\t}\n\titem.ConfigEncrypted = configEncrypted.String\n\titem.IsActive = intToBool(isActive)\n\titem.CreatedBy = nullStringToPtr(createdBy)\n\treturn &item, nil\n}\n\nfunc scanAlertRule(scanner interface {\n\tScan(dest ...interface{}) error\n}) (AlertRule, error) {\n\tvar item AlertRule\n\tvar subjectTemplate, bodyTemplate, createdBy sql.NullString\n\tvar enabled int\n\tif err := scanner.Scan(\n\t\t&item.ID, &item.Name, &item.EventType, &item.SeverityMin, &enabled, &item.CooldownSeconds, &item.MaxAttempts,\n\t\t&subjectTemplate, &bodyTemplate, &createdBy, &item.CreatedAt, &item.UpdatedAt,\n\t); err != nil {\n\t\treturn item, fmt.Errorf(\"scan alert rule: %w\", err)\n\t}\n\titem.Enabled = intToBool(enabled)\n\titem.SubjectTemplate = 
nullStringToPtr(subjectTemplate)\n\titem.BodyTemplate = nullStringToPtr(bodyTemplate)\n\titem.CreatedBy = nullStringToPtr(createdBy)\n\treturn item, nil\n}\n\nfunc scanAlertRuleRow(row *sql.Row) (*AlertRule, error) {\n\tvar item AlertRule\n\tvar enabled int\n\tvar subjectTemplate, bodyTemplate, createdBy sql.NullString\n\terr := row.Scan(\n\t\t&item.ID, &item.Name, &item.EventType, &item.SeverityMin, &enabled, &item.CooldownSeconds, &item.MaxAttempts,\n\t\t&subjectTemplate, &bodyTemplate, &createdBy, &item.CreatedAt, &item.UpdatedAt,\n\t)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"scan alert rule row: %w\", err)\n\t}\n\titem.Enabled = intToBool(enabled)\n\titem.SubjectTemplate = nullStringToPtr(subjectTemplate)\n\titem.BodyTemplate = nullStringToPtr(bodyTemplate)\n\titem.CreatedBy = nullStringToPtr(createdBy)\n\treturn &item, nil\n}\n\nfunc parseRecipientsJSON(raw string) []string {\n\tif strings.TrimSpace(raw) == \"\" {\n\t\treturn []string{}\n\t}\n\tvar values []string\n\tif err := json.Unmarshal([]byte(raw), &values); err != nil {\n\t\treturn []string{}\n\t}\n\tout := make([]string, 0, len(values))\n\tfor _, v := range values {\n\t\tv = strings.TrimSpace(v)\n\t\tif v != \"\" {\n\t\t\tout = append(out, v)\n\t\t}\n\t}\n\treturn out\n}\n"
  },
  {
    "path": "internal/database/audit_logs.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"strings\"\n\n\t\"github.com/google/uuid\"\n)\n\n// AuditLogParams holds parameters for creating an audit log entry.\ntype AuditLogParams struct {\n\tAction       string\n\tUsername     *string\n\tConnectionID *string\n\tDetails      *string\n\tIPAddress    *string\n}\n\n// AuditLog represents an audit log entry.\ntype AuditLog struct {\n\tID           string  `json:\"id\"`\n\tAction       string  `json:\"action\"`\n\tUsername     *string `json:\"username\"`\n\tConnectionID *string `json:\"connection_id\"`\n\tDetails      *string `json:\"details\"`\n\tIPAddress    *string `json:\"ip_address\"`\n\tCreatedAt    string  `json:\"created_at\"`\n}\n\n// CreateAuditLog creates a new audit log entry.\nfunc (db *DB) CreateAuditLog(params AuditLogParams) error {\n\tid := uuid.NewString()\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO audit_logs (id, action, username, connection_id, details, ip_address)\n\t\t VALUES (?, ?, ?, ?, ?, ?)`,\n\t\tid, params.Action, params.Username, params.ConnectionID, params.Details, params.IPAddress,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create audit log: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetAuditLogs retrieves audit logs, most recent first.\nfunc (db *DB) GetAuditLogs(limit int) ([]AuditLog, error) {\n\treturn db.GetAuditLogsFiltered(limit, \"\", \"\", \"\", \"\")\n}\n\n// GetAuditLogsFiltered retrieves audit logs with optional filters, most recent first.\nfunc (db *DB) GetAuditLogsFiltered(limit int, timeRange, action, username, search string) ([]AuditLog, error) {\n\tif limit <= 0 {\n\t\tlimit = 100\n\t}\n\n\tvar whereClauses []string\n\targs := make([]any, 0, 8)\n\n\ttimeRange = strings.TrimSpace(timeRange)\n\taction = strings.TrimSpace(action)\n\tusername = strings.TrimSpace(username)\n\tsearch = strings.TrimSpace(strings.ToLower(search))\n\n\ttimeRangeOffsets := map[string]string{\n\t\t\"15m\": \"-15 minutes\",\n\t\t\"1h\":  \"-1 
hour\",\n\t\t\"6h\":  \"-6 hours\",\n\t\t\"24h\": \"-24 hours\",\n\t\t\"7d\":  \"-7 days\",\n\t\t\"30d\": \"-30 days\",\n\t}\n\tif offset, ok := timeRangeOffsets[timeRange]; ok {\n\t\twhereClauses = append(whereClauses, \"created_at >= datetime('now', ?)\")\n\t\targs = append(args, offset)\n\t}\n\tif action != \"\" {\n\t\twhereClauses = append(whereClauses, \"action = ?\")\n\t\targs = append(args, action)\n\t}\n\tif username != \"\" {\n\t\twhereClauses = append(whereClauses, \"username = ?\")\n\t\targs = append(args, username)\n\t}\n\tif search != \"\" {\n\t\tterm := \"%\" + search + \"%\"\n\t\twhereClauses = append(whereClauses,\n\t\t\t`(\n\t\t\t\tlower(action) LIKE ? OR\n\t\t\t\tlower(COALESCE(username, '')) LIKE ? OR\n\t\t\t\tlower(COALESCE(details, '')) LIKE ? OR\n\t\t\t\tlower(COALESCE(ip_address, '')) LIKE ?\n\t\t\t)`,\n\t\t)\n\t\targs = append(args, term, term, term, term)\n\t}\n\n\tquery := strings.Builder{}\n\tquery.WriteString(`SELECT id, action, username, connection_id, details, ip_address, created_at FROM audit_logs`)\n\tif len(whereClauses) > 0 {\n\t\tquery.WriteString(\" WHERE \")\n\t\tquery.WriteString(strings.Join(whereClauses, \" AND \"))\n\t}\n\tquery.WriteString(\" ORDER BY created_at DESC LIMIT ?\")\n\targs = append(args, limit)\n\n\trows, err := db.conn.Query(query.String(), args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get audit logs: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar logs []AuditLog\n\tfor rows.Next() {\n\t\tvar l AuditLog\n\t\tvar username, connID, details, ip sql.NullString\n\t\tif err := rows.Scan(&l.ID, &l.Action, &username, &connID, &details, &ip, &l.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan audit log: %w\", err)\n\t\t}\n\t\tl.Username = nullStringToPtr(username)\n\t\tl.ConnectionID = nullStringToPtr(connID)\n\t\tl.Details = nullStringToPtr(details)\n\t\tl.IPAddress = nullStringToPtr(ip)\n\t\tlogs = append(logs, l)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, 
fmt.Errorf(\"iterate audit log rows: %w\", err)\n\t}\n\treturn logs, nil\n}\n"
  },
  {
    "path": "internal/database/audit_logs_test.go",
    "content": "package database\n\nimport (\n\t\"path/filepath\"\n\t\"testing\"\n)\n\nfunc openTestDB(t *testing.T) *DB {\n\tt.Helper()\n\tdbPath := filepath.Join(t.TempDir(), \"test.db\")\n\tdb, err := Open(dbPath)\n\tif err != nil {\n\t\tt.Fatalf(\"open test db: %v\", err)\n\t}\n\tt.Cleanup(func() {\n\t\t_ = db.Close()\n\t})\n\treturn db\n}\n\nfunc insertAuditLogAt(t *testing.T, db *DB, action, username, details, ip, createdAtExpr string) {\n\tt.Helper()\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO audit_logs (id, action, username, connection_id, details, ip_address, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, `+createdAtExpr+`)`,\n\t\taction+\"-id-\"+username, action, username, \"conn-1\", details, ip,\n\t)\n\tif err != nil {\n\t\tt.Fatalf(\"insert audit log: %v\", err)\n\t}\n}\n\nfunc TestGetAuditLogsFiltered_TimeRangeActionUsernameSearch(t *testing.T) {\n\tdb := openTestDB(t)\n\n\tinsertAuditLogAt(t, db, \"connection.created\", \"alice\", \"Created warehouse connection\", \"10.0.0.10\", \"datetime('now', '-5 minutes')\")\n\tinsertAuditLogAt(t, db, \"connection.deleted\", \"bob\", \"Deleted old tunnel\", \"10.0.0.20\", \"datetime('now', '-2 hours')\")\n\tinsertAuditLogAt(t, db, \"user.login\", \"alice\", \"Login success\", \"10.0.0.11\", \"datetime('now', '-10 minutes')\")\n\n\trows, err := db.GetAuditLogsFiltered(100, \"1h\", \"connection.created\", \"alice\", \"warehouse\")\n\tif err != nil {\n\t\tt.Fatalf(\"GetAuditLogsFiltered: %v\", err)\n\t}\n\tif len(rows) != 1 {\n\t\tt.Fatalf(\"expected 1 row, got %d\", len(rows))\n\t}\n\tif rows[0].Action != \"connection.created\" {\n\t\tt.Fatalf(\"unexpected action: %s\", rows[0].Action)\n\t}\n\tif rows[0].Username == nil || *rows[0].Username != \"alice\" {\n\t\tt.Fatalf(\"unexpected username: %+v\", rows[0].Username)\n\t}\n}\n\nfunc TestGetAuditLogsFiltered_SearchMatchesMultipleFieldsCaseInsensitive(t *testing.T) {\n\tdb := openTestDB(t)\n\n\tinsertAuditLogAt(t, db, \"connection.created\", \"alice\", \"Created 
connection for ETL\", \"10.0.0.10\", \"datetime('now', '-5 minutes')\")\n\tinsertAuditLogAt(t, db, \"user.login\", \"charlie\", \"Login success\", \"10.0.0.21\", \"datetime('now', '-4 minutes')\")\n\n\trows, err := db.GetAuditLogsFiltered(100, \"\", \"\", \"\", \"etl\")\n\tif err != nil {\n\t\tt.Fatalf(\"GetAuditLogsFiltered search details: %v\", err)\n\t}\n\tif len(rows) != 1 || rows[0].Action != \"connection.created\" {\n\t\tt.Fatalf(\"expected details match on connection.created, got %+v\", rows)\n\t}\n\n\trows, err = db.GetAuditLogsFiltered(100, \"\", \"\", \"\", \"CHARLIE\")\n\tif err != nil {\n\t\tt.Fatalf(\"GetAuditLogsFiltered search username: %v\", err)\n\t}\n\tif len(rows) != 1 || rows[0].Action != \"user.login\" {\n\t\tt.Fatalf(\"expected username match on user.login, got %+v\", rows)\n\t}\n\n\trows, err = db.GetAuditLogsFiltered(100, \"\", \"\", \"\", \"10.0.0.10\")\n\tif err != nil {\n\t\tt.Fatalf(\"GetAuditLogsFiltered search ip: %v\", err)\n\t}\n\tif len(rows) != 1 || rows[0].Action != \"connection.created\" {\n\t\tt.Fatalf(\"expected ip match on connection.created, got %+v\", rows)\n\t}\n}\n"
  },
  {
    "path": "internal/database/brain.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// BrainProvider stores global AI provider configuration.\ntype BrainProvider struct {\n\tID        string  `json:\"id\"`\n\tName      string  `json:\"name\"`\n\tKind      string  `json:\"kind\"`\n\tBaseURL   *string `json:\"base_url\"`\n\tHasAPIKey bool    `json:\"has_api_key\"`\n\tIsActive  bool    `json:\"is_active\"`\n\tIsDefault bool    `json:\"is_default\"`\n\tCreatedBy *string `json:\"created_by\"`\n\tCreatedAt string  `json:\"created_at\"`\n\tUpdatedAt string  `json:\"updated_at\"`\n}\n\n// BrainProviderSecret returns provider config with secret for runtime use.\ntype BrainProviderSecret struct {\n\tBrainProvider\n\tEncryptedAPIKey *string `json:\"-\"`\n}\n\n// BrainModel stores available models per provider.\ntype BrainModel struct {\n\tID          string  `json:\"id\"`\n\tProviderID  string  `json:\"provider_id\"`\n\tName        string  `json:\"name\"`\n\tDisplayName *string `json:\"display_name\"`\n\tIsActive    bool    `json:\"is_active\"`\n\tIsDefault   bool    `json:\"is_default\"`\n\tCreatedAt   string  `json:\"created_at\"`\n\tUpdatedAt   string  `json:\"updated_at\"`\n}\n\n// BrainModelRuntime is used for chat execution.\ntype BrainModelRuntime struct {\n\tModelID              string\n\tModelName            string\n\tProviderID           string\n\tProviderName         string\n\tProviderKind         string\n\tProviderBaseURL      *string\n\tProviderEncryptedKey *string\n\tModelActive          bool\n\tProviderActive       bool\n}\n\n// BrainChat stores a user chat thread.\ntype BrainChat struct {\n\tID              string  `json:\"id\"`\n\tConnectionID    string  `json:\"connection_id\"`\n\tUsername        string  `json:\"username\"`\n\tTitle           string  `json:\"title\"`\n\tProviderID      *string `json:\"provider_id\"`\n\tModelID         *string `json:\"model_id\"`\n\tArchived        bool    
`json:\"archived\"`\n\tLastMessageAt   *string `json:\"last_message_at\"`\n\tContextDatabase *string `json:\"context_database\"`\n\tContextTable    *string `json:\"context_table\"`\n\tContextTables   *string `json:\"context_tables\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\n// BrainMessage stores one chat turn.\ntype BrainMessage struct {\n\tID        string  `json:\"id\"`\n\tChatID    string  `json:\"chat_id\"`\n\tRole      string  `json:\"role\"`\n\tContent   string  `json:\"content\"`\n\tStatus    string  `json:\"status\"`\n\tError     *string `json:\"error\"`\n\tCreatedAt string  `json:\"created_at\"`\n\tUpdatedAt string  `json:\"updated_at\"`\n}\n\n// BrainArtifact stores generated artifacts linked to chats/messages.\ntype BrainArtifact struct {\n\tID        string  `json:\"id\"`\n\tChatID    string  `json:\"chat_id\"`\n\tMessageID *string `json:\"message_id\"`\n\tType      string  `json:\"type\"`\n\tTitle     string  `json:\"title\"`\n\tContent   string  `json:\"content\"`\n\tCreatedBy *string `json:\"created_by\"`\n\tCreatedAt string  `json:\"created_at\"`\n}\n\n// BrainToolCall stores tool invocation traces.\ntype BrainToolCall struct {\n\tID         string  `json:\"id\"`\n\tChatID     string  `json:\"chat_id\"`\n\tMessageID  string  `json:\"message_id\"`\n\tToolName   string  `json:\"tool_name\"`\n\tInputJSON  string  `json:\"input_json\"`\n\tOutputJSON string  `json:\"output_json\"`\n\tStatus     string  `json:\"status\"`\n\tError      *string `json:\"error\"`\n\tCreatedAt  string  `json:\"created_at\"`\n}\n\n// BrainSkill stores admin-managed assistant instructions.\ntype BrainSkill struct {\n\tID        string  `json:\"id\"`\n\tName      string  `json:\"name\"`\n\tContent   string  `json:\"content\"`\n\tIsActive  bool    `json:\"is_active\"`\n\tIsDefault bool    `json:\"is_default\"`\n\tCreatedBy *string `json:\"created_by\"`\n\tCreatedAt string  `json:\"created_at\"`\n\tUpdatedAt string  
`json:\"updated_at\"`\n}\n\nfunc boolToInt(v bool) int {\n\tif v {\n\t\treturn 1\n\t}\n\treturn 0\n}\n\nfunc intToBool(v int) bool {\n\treturn v != 0\n}\n\nfunc nullableString(value string) interface{} {\n\tif strings.TrimSpace(value) == \"\" {\n\t\treturn nil\n\t}\n\treturn strings.TrimSpace(value)\n}\n\n// GetBrainProviders returns all providers.\nfunc (db *DB) GetBrainProviders() ([]BrainProvider, error) {\n\trows, err := db.conn.Query(`SELECT id, name, kind, base_url, encrypted_api_key, is_active, is_default, created_by, created_at, updated_at FROM brain_providers ORDER BY is_default DESC, name ASC`)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain providers: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tproviders := make([]BrainProvider, 0)\n\tfor rows.Next() {\n\t\tvar p BrainProvider\n\t\tvar baseURL, encrypted, createdBy sql.NullString\n\t\tvar active, def int\n\t\tif err := rows.Scan(&p.ID, &p.Name, &p.Kind, &baseURL, &encrypted, &active, &def, &createdBy, &p.CreatedAt, &p.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan brain provider: %w\", err)\n\t\t}\n\t\tp.BaseURL = nullStringToPtr(baseURL)\n\t\tp.HasAPIKey = encrypted.Valid && strings.TrimSpace(encrypted.String) != \"\"\n\t\tp.IsActive = intToBool(active)\n\t\tp.IsDefault = intToBool(def)\n\t\tp.CreatedBy = nullStringToPtr(createdBy)\n\t\tproviders = append(providers, p)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain providers: %w\", err)\n\t}\n\treturn providers, nil\n}\n\n// GetBrainProviderByID returns provider config including encrypted key.\nfunc (db *DB) GetBrainProviderByID(id string) (*BrainProviderSecret, error) {\n\trow := db.conn.QueryRow(`SELECT id, name, kind, base_url, encrypted_api_key, is_active, is_default, created_by, created_at, updated_at FROM brain_providers WHERE id = ?`, id)\n\tvar p BrainProviderSecret\n\tvar baseURL, encrypted, createdBy sql.NullString\n\tvar active, def int\n\tif err := row.Scan(&p.ID, &p.Name, 
&p.Kind, &baseURL, &encrypted, &active, &def, &createdBy, &p.CreatedAt, &p.UpdatedAt); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get brain provider by id: %w\", err)\n\t}\n\tp.BaseURL = nullStringToPtr(baseURL)\n\tp.EncryptedAPIKey = nullStringToPtr(encrypted)\n\tp.HasAPIKey = encrypted.Valid && strings.TrimSpace(encrypted.String) != \"\"\n\tp.IsActive = intToBool(active)\n\tp.IsDefault = intToBool(def)\n\tp.CreatedBy = nullStringToPtr(createdBy)\n\treturn &p, nil\n}\n\n// CreateBrainProvider creates a provider.\nfunc (db *DB) CreateBrainProvider(name, kind, baseURL string, encryptedAPIKey *string, isActive, isDefault bool, createdBy string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"begin create brain provider: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tif isDefault {\n\t\tif _, err := tx.Exec(`UPDATE brain_providers SET is_default = 0, updated_at = ?`, now); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"clear default provider: %w\", err)\n\t\t}\n\t}\n\n\tif _, err := tx.Exec(\n\t\t`INSERT INTO brain_providers (id, name, kind, base_url, encrypted_api_key, is_active, is_default, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid,\n\t\tstrings.TrimSpace(name),\n\t\tstrings.TrimSpace(kind),\n\t\tnullableString(baseURL),\n\t\tencryptedAPIKey,\n\t\tboolToInt(isActive),\n\t\tboolToInt(isDefault),\n\t\tnullableString(createdBy),\n\t\tnow,\n\t\tnow,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"insert brain provider: %w\", err)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn \"\", fmt.Errorf(\"commit create brain provider: %w\", err)\n\t}\n\n\treturn id, nil\n}\n\n// UpdateBrainProvider updates a provider.\nfunc (db *DB) UpdateBrainProvider(id, name, kind, baseURL string, encryptedAPIKey *string, updateAPIKey bool, isActive, 
isDefault bool) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin update brain provider: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tif isDefault {\n\t\tif _, err := tx.Exec(`UPDATE brain_providers SET is_default = 0, updated_at = ?`, now); err != nil {\n\t\t\treturn fmt.Errorf(\"clear default provider: %w\", err)\n\t\t}\n\t}\n\n\tif updateAPIKey {\n\t\tif _, err := tx.Exec(\n\t\t\t`UPDATE brain_providers\n\t\t\t SET name = ?, kind = ?, base_url = ?, encrypted_api_key = ?, is_active = ?, is_default = ?, updated_at = ?\n\t\t\t WHERE id = ?`,\n\t\t\tstrings.TrimSpace(name),\n\t\t\tstrings.TrimSpace(kind),\n\t\t\tnullableString(baseURL),\n\t\t\tencryptedAPIKey,\n\t\t\tboolToInt(isActive),\n\t\t\tboolToInt(isDefault),\n\t\t\tnow,\n\t\t\tid,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"update brain provider: %w\", err)\n\t\t}\n\t} else {\n\t\tif _, err := tx.Exec(\n\t\t\t`UPDATE brain_providers\n\t\t\t SET name = ?, kind = ?, base_url = ?, is_active = ?, is_default = ?, updated_at = ?\n\t\t\t WHERE id = ?`,\n\t\t\tstrings.TrimSpace(name),\n\t\t\tstrings.TrimSpace(kind),\n\t\t\tnullableString(baseURL),\n\t\t\tboolToInt(isActive),\n\t\t\tboolToInt(isDefault),\n\t\t\tnow,\n\t\t\tid,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"update brain provider: %w\", err)\n\t\t}\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit update brain provider: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteBrainProvider removes a provider and cascades dependent rows.\nfunc (db *DB) DeleteBrainProvider(id string) error {\n\tif _, err := db.conn.Exec(`DELETE FROM brain_providers WHERE id = ?`, id); err != nil {\n\t\treturn fmt.Errorf(\"delete brain provider: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetBrainModels returns models, optionally for one provider.\nfunc (db *DB) GetBrainModels(providerID string) ([]BrainModel, error) {\n\tquery := `SELECT id, provider_id, name, 
display_name, is_active, is_default, created_at, updated_at FROM brain_models`\n\targs := []interface{}{}\n\tif strings.TrimSpace(providerID) != \"\" {\n\t\tquery += ` WHERE provider_id = ?`\n\t\targs = append(args, providerID)\n\t}\n\tquery += ` ORDER BY is_default DESC, name ASC`\n\n\trows, err := db.conn.Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain models: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tmodels := make([]BrainModel, 0)\n\tfor rows.Next() {\n\t\tvar m BrainModel\n\t\tvar display sql.NullString\n\t\tvar active, def int\n\t\tif err := rows.Scan(&m.ID, &m.ProviderID, &m.Name, &display, &active, &def, &m.CreatedAt, &m.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan brain model: %w\", err)\n\t\t}\n\t\tm.DisplayName = nullStringToPtr(display)\n\t\tm.IsActive = intToBool(active)\n\t\tm.IsDefault = intToBool(def)\n\t\tmodels = append(models, m)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain models: %w\", err)\n\t}\n\treturn models, nil\n}\n\n// GetBrainModelByID returns one model by id.\nfunc (db *DB) GetBrainModelByID(id string) (*BrainModel, error) {\n\trow := db.conn.QueryRow(`SELECT id, provider_id, name, display_name, is_active, is_default, created_at, updated_at FROM brain_models WHERE id = ?`, id)\n\tvar m BrainModel\n\tvar display sql.NullString\n\tvar active, def int\n\tif err := row.Scan(&m.ID, &m.ProviderID, &m.Name, &display, &active, &def, &m.CreatedAt, &m.UpdatedAt); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get brain model by id: %w\", err)\n\t}\n\tm.DisplayName = nullStringToPtr(display)\n\tm.IsActive = intToBool(active)\n\tm.IsDefault = intToBool(def)\n\treturn &m, nil\n}\n\n// EnsureBrainModel inserts or updates a model by provider+name and returns its id.\nfunc (db *DB) EnsureBrainModel(providerID, name, displayName string) (string, error) {\n\tnow := 
time.Now().UTC().Format(time.RFC3339)\n\n\tvar existingID string\n\trow := db.conn.QueryRow(`SELECT id FROM brain_models WHERE provider_id = ? AND name = ?`, providerID, name)\n\tif err := row.Scan(&existingID); err != nil && err != sql.ErrNoRows {\n\t\treturn \"\", fmt.Errorf(\"lookup brain model: %w\", err)\n\t}\n\n\tif existingID != \"\" {\n\t\tif _, err := db.conn.Exec(`UPDATE brain_models SET display_name = ?, updated_at = ? WHERE id = ?`, nullableString(displayName), now, existingID); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"update brain model: %w\", err)\n\t\t}\n\t\treturn existingID, nil\n\t}\n\n\tid := uuid.NewString()\n\tif _, err := db.conn.Exec(\n\t\t`INSERT INTO brain_models (id, provider_id, name, display_name, is_active, is_default, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, 0, 0, ?, ?)`,\n\t\tid, providerID, strings.TrimSpace(name), nullableString(displayName), now, now,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"insert brain model: %w\", err)\n\t}\n\n\treturn id, nil\n}\n\n// UpdateBrainModel updates model flags and display name.\nfunc (db *DB) UpdateBrainModel(id string, displayName string, isActive, isDefault bool) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin update brain model: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tvar providerID string\n\tif err := tx.QueryRow(`SELECT provider_id FROM brain_models WHERE id = ?`, id).Scan(&providerID); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil\n\t\t}\n\t\treturn fmt.Errorf(\"load model provider: %w\", err)\n\t}\n\n\tif isDefault {\n\t\tif _, err := tx.Exec(`UPDATE brain_models SET is_default = 0, updated_at = ? WHERE provider_id = ?`, now, providerID); err != nil {\n\t\t\treturn fmt.Errorf(\"clear default model: %w\", err)\n\t\t}\n\t}\n\n\tif _, err := tx.Exec(\n\t\t`UPDATE brain_models SET display_name = ?, is_active = ?, is_default = ?, updated_at = ? 
WHERE id = ?`,\n\t\tnullableString(displayName), boolToInt(isActive), boolToInt(isDefault), now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"update brain model: %w\", err)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit update brain model: %w\", err)\n\t}\n\treturn nil\n}\n\n// SetBrainModelActive updates active flag for a model without touching default flag.\nfunc (db *DB) SetBrainModelActive(id string, isActive bool) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE brain_models SET is_active = ?, updated_at = ? WHERE id = ?`,\n\t\tboolToInt(isActive), now, id,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"set brain model active: %w\", err)\n\t}\n\treturn nil\n}\n\n// ClearDefaultBrainModelsByProvider clears default flag for all models under one provider.\nfunc (db *DB) ClearDefaultBrainModelsByProvider(providerID string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE brain_models SET is_default = 0, updated_at = ? WHERE provider_id = ?`,\n\t\tnow, providerID,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"clear default brain models by provider: %w\", err)\n\t}\n\treturn nil\n}\n\n// ClearDefaultBrainModelByProviderExcept keeps one default and clears others for the same provider.\nfunc (db *DB) ClearDefaultBrainModelByProviderExcept(providerID, keepModelID string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(\n\t\t`UPDATE brain_models SET is_default = 0, updated_at = ? WHERE provider_id = ? 
AND id <> ?`,\n\t\tnow, providerID, keepModelID,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"clear default brain model except: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetDefaultBrainModelRuntime returns the default active model and provider.\nfunc (db *DB) GetDefaultBrainModelRuntime() (*BrainModelRuntime, error) {\n\trow := db.conn.QueryRow(`\n\t\tSELECT\n\t\t\tm.id,\n\t\t\tm.name,\n\t\t\tp.id,\n\t\t\tp.name,\n\t\t\tp.kind,\n\t\t\tp.base_url,\n\t\t\tp.encrypted_api_key,\n\t\t\tm.is_active,\n\t\t\tp.is_active\n\t\tFROM brain_models m\n\t\tJOIN brain_providers p ON p.id = m.provider_id\n\t\tWHERE m.is_active = 1 AND p.is_active = 1\n\t\tORDER BY m.is_default DESC, p.is_default DESC, m.created_at ASC\n\t\tLIMIT 1\n\t`)\n\n\tvar rt BrainModelRuntime\n\tvar baseURL, encrypted sql.NullString\n\tvar mActive, pActive int\n\tif err := row.Scan(&rt.ModelID, &rt.ModelName, &rt.ProviderID, &rt.ProviderName, &rt.ProviderKind, &baseURL, &encrypted, &mActive, &pActive); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get default brain model runtime: %w\", err)\n\t}\n\tif baseURL.Valid {\n\t\trt.ProviderBaseURL = &baseURL.String\n\t}\n\tif encrypted.Valid {\n\t\trt.ProviderEncryptedKey = &encrypted.String\n\t}\n\trt.ModelActive = intToBool(mActive)\n\trt.ProviderActive = intToBool(pActive)\n\treturn &rt, nil\n}\n\n// GetBrainModelRuntimeByID returns model/provider runtime config.\nfunc (db *DB) GetBrainModelRuntimeByID(modelID string) (*BrainModelRuntime, error) {\n\trow := db.conn.QueryRow(`\n\t\tSELECT\n\t\t\tm.id,\n\t\t\tm.name,\n\t\t\tp.id,\n\t\t\tp.name,\n\t\t\tp.kind,\n\t\t\tp.base_url,\n\t\t\tp.encrypted_api_key,\n\t\t\tm.is_active,\n\t\t\tp.is_active\n\t\tFROM brain_models m\n\t\tJOIN brain_providers p ON p.id = m.provider_id\n\t\tWHERE m.id = ?\n\t\tLIMIT 1\n\t`, modelID)\n\n\tvar rt BrainModelRuntime\n\tvar baseURL, encrypted sql.NullString\n\tvar mActive, pActive int\n\tif err := row.Scan(&rt.ModelID, 
&rt.ModelName, &rt.ProviderID, &rt.ProviderName, &rt.ProviderKind, &baseURL, &encrypted, &mActive, &pActive); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get brain model runtime by id: %w\", err)\n\t}\n\tif baseURL.Valid {\n\t\trt.ProviderBaseURL = &baseURL.String\n\t}\n\tif encrypted.Valid {\n\t\trt.ProviderEncryptedKey = &encrypted.String\n\t}\n\trt.ModelActive = intToBool(mActive)\n\trt.ProviderActive = intToBool(pActive)\n\treturn &rt, nil\n}\n\n// GetBrainSkills lists all skills.\nfunc (db *DB) GetBrainSkills() ([]BrainSkill, error) {\n\trows, err := db.conn.Query(`SELECT id, name, content, is_active, is_default, created_by, created_at, updated_at FROM brain_skills ORDER BY is_default DESC, updated_at DESC`)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain skills: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tskills := make([]BrainSkill, 0)\n\tfor rows.Next() {\n\t\tvar s BrainSkill\n\t\tvar active, def int\n\t\tvar createdBy sql.NullString\n\t\tif err := rows.Scan(&s.ID, &s.Name, &s.Content, &active, &def, &createdBy, &s.CreatedAt, &s.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan brain skill: %w\", err)\n\t\t}\n\t\ts.IsActive = intToBool(active)\n\t\ts.IsDefault = intToBool(def)\n\t\ts.CreatedBy = nullStringToPtr(createdBy)\n\t\tskills = append(skills, s)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain skills: %w\", err)\n\t}\n\treturn skills, nil\n}\n\n// GetActiveBrainSkill returns the active skill content.\nfunc (db *DB) GetActiveBrainSkill() (*BrainSkill, error) {\n\trow := db.conn.QueryRow(`SELECT id, name, content, is_active, is_default, created_by, created_at, updated_at FROM brain_skills WHERE is_active = 1 ORDER BY is_default DESC, updated_at DESC LIMIT 1`)\n\tvar s BrainSkill\n\tvar active, def int\n\tvar createdBy sql.NullString\n\tif err := row.Scan(&s.ID, &s.Name, &s.Content, &active, &def, &createdBy, &s.CreatedAt, 
&s.UpdatedAt); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get active brain skill: %w\", err)\n\t}\n\ts.IsActive = intToBool(active)\n\ts.IsDefault = intToBool(def)\n\ts.CreatedBy = nullStringToPtr(createdBy)\n\treturn &s, nil\n}\n\n// GetBrainSkillByID returns one skill by id.\nfunc (db *DB) GetBrainSkillByID(id string) (*BrainSkill, error) {\n\trow := db.conn.QueryRow(`SELECT id, name, content, is_active, is_default, created_by, created_at, updated_at FROM brain_skills WHERE id = ?`, id)\n\tvar s BrainSkill\n\tvar active, def int\n\tvar createdBy sql.NullString\n\tif err := row.Scan(&s.ID, &s.Name, &s.Content, &active, &def, &createdBy, &s.CreatedAt, &s.UpdatedAt); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get brain skill by id: %w\", err)\n\t}\n\ts.IsActive = intToBool(active)\n\ts.IsDefault = intToBool(def)\n\ts.CreatedBy = nullStringToPtr(createdBy)\n\treturn &s, nil\n}\n\n// UpsertDefaultBrainSkill stores a default skill, creating one if needed.\nfunc (db *DB) UpsertDefaultBrainSkill(name, content, createdBy string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tvar existingID string\n\terr := db.conn.QueryRow(`SELECT id FROM brain_skills WHERE is_default = 1 LIMIT 1`).Scan(&existingID)\n\tif err != nil && err != sql.ErrNoRows {\n\t\treturn \"\", fmt.Errorf(\"lookup default brain skill: %w\", err)\n\t}\n\tif existingID == \"\" {\n\t\tid := uuid.NewString()\n\t\tif _, err := db.conn.Exec(`INSERT INTO brain_skills (id, name, content, is_active, is_default, created_by, created_at, updated_at) VALUES (?, ?, ?, 1, 1, ?, ?, ?)`, id, name, content, nullableString(createdBy), now, now); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"insert default brain skill: %w\", err)\n\t\t}\n\t\treturn id, nil\n\t}\n\n\tif _, err := db.conn.Exec(`UPDATE brain_skills SET name = ?, content = ?, is_active = 1, updated_at = ? 
WHERE id = ?`, name, content, now, existingID); err != nil {\n\t\treturn \"\", fmt.Errorf(\"update default brain skill: %w\", err)\n\t}\n\treturn existingID, nil\n}\n\n// UpdateBrainSkill updates skill content and active/default flags.\nfunc (db *DB) UpdateBrainSkill(id, name, content string, isActive, isDefault bool) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin update brain skill: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tif isDefault {\n\t\tif _, err := tx.Exec(`UPDATE brain_skills SET is_default = 0, updated_at = ?`, now); err != nil {\n\t\t\treturn fmt.Errorf(\"clear default brain skill: %w\", err)\n\t\t}\n\t}\n\tif isActive {\n\t\tif _, err := tx.Exec(`UPDATE brain_skills SET is_active = 0, updated_at = ?`, now); err != nil {\n\t\t\treturn fmt.Errorf(\"clear active brain skill: %w\", err)\n\t\t}\n\t}\n\n\tif _, err := tx.Exec(`UPDATE brain_skills SET name = ?, content = ?, is_active = ?, is_default = ?, updated_at = ? 
WHERE id = ?`, name, content, boolToInt(isActive), boolToInt(isDefault), now, id); err != nil {\n\t\treturn fmt.Errorf(\"update brain skill: %w\", err)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit update brain skill: %w\", err)\n\t}\n\treturn nil\n}\n\n// CreateBrainSkill creates a new skill.\nfunc (db *DB) CreateBrainSkill(name, content, createdBy string, isActive, isDefault bool) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"begin create brain skill: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tif isDefault {\n\t\tif _, err := tx.Exec(`UPDATE brain_skills SET is_default = 0, updated_at = ?`, now); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"clear default brain skill: %w\", err)\n\t\t}\n\t}\n\tif isActive {\n\t\tif _, err := tx.Exec(`UPDATE brain_skills SET is_active = 0, updated_at = ?`, now); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"clear active brain skill: %w\", err)\n\t\t}\n\t}\n\n\tif _, err := tx.Exec(`INSERT INTO brain_skills (id, name, content, is_active, is_default, created_by, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, id, name, content, boolToInt(isActive), boolToInt(isDefault), nullableString(createdBy), now, now); err != nil {\n\t\treturn \"\", fmt.Errorf(\"insert brain skill: %w\", err)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn \"\", fmt.Errorf(\"commit create brain skill: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// GetBrainChatsByUser returns chats for a user scoped to connection.\nfunc (db *DB) GetBrainChatsByUser(username, connectionID string, includeArchived bool) ([]BrainChat, error) {\n\tquery := `SELECT id, connection_id, username, title, provider_id, model_id, archived, last_message_at, context_database, context_table, context_tables, created_at, updated_at FROM brain_chats WHERE username = ? 
AND connection_id = ?`\n\targs := []interface{}{username, connectionID}\n\tif !includeArchived {\n\t\tquery += ` AND archived = 0`\n\t}\n\tquery += ` ORDER BY COALESCE(last_message_at, updated_at) DESC`\n\n\trows, err := db.conn.Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain chats by user: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tchats := make([]BrainChat, 0)\n\tfor rows.Next() {\n\t\tchat, err := scanBrainChat(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tchats = append(chats, chat)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain chats: %w\", err)\n\t}\n\treturn chats, nil\n}\n\nfunc scanBrainChat(scanner interface {\n\tScan(dest ...interface{}) error\n}) (BrainChat, error) {\n\tvar c BrainChat\n\tvar providerID, modelID, lastMessageAt, ctxDB, ctxTable, ctxTables sql.NullString\n\tvar archived int\n\tif err := scanner.Scan(&c.ID, &c.ConnectionID, &c.Username, &c.Title, &providerID, &modelID, &archived, &lastMessageAt, &ctxDB, &ctxTable, &ctxTables, &c.CreatedAt, &c.UpdatedAt); err != nil {\n\t\treturn BrainChat{}, fmt.Errorf(\"scan brain chat: %w\", err)\n\t}\n\tc.ProviderID = nullStringToPtr(providerID)\n\tc.ModelID = nullStringToPtr(modelID)\n\tc.LastMessageAt = nullStringToPtr(lastMessageAt)\n\tc.ContextDatabase = nullStringToPtr(ctxDB)\n\tc.ContextTable = nullStringToPtr(ctxTable)\n\tc.ContextTables = nullStringToPtr(ctxTables)\n\tc.Archived = intToBool(archived)\n\treturn c, nil\n}\n\n// GetBrainChatByIDForUser loads one chat if owned by user.\nfunc (db *DB) GetBrainChatByIDForUser(chatID, username string) (*BrainChat, error) {\n\trow := db.conn.QueryRow(`SELECT id, connection_id, username, title, provider_id, model_id, archived, last_message_at, context_database, context_table, context_tables, created_at, updated_at FROM brain_chats WHERE id = ? 
AND username = ?`, chatID, username)\n\tvar c BrainChat\n\tvar providerID, modelID, lastMessageAt, ctxDB, ctxTable, ctxTables sql.NullString\n\tvar archived int\n\tif err := row.Scan(&c.ID, &c.ConnectionID, &c.Username, &c.Title, &providerID, &modelID, &archived, &lastMessageAt, &ctxDB, &ctxTable, &ctxTables, &c.CreatedAt, &c.UpdatedAt); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, fmt.Errorf(\"get brain chat by id for user: %w\", err)\n\t}\n\tc.ProviderID = nullStringToPtr(providerID)\n\tc.ModelID = nullStringToPtr(modelID)\n\tc.LastMessageAt = nullStringToPtr(lastMessageAt)\n\tc.ContextDatabase = nullStringToPtr(ctxDB)\n\tc.ContextTable = nullStringToPtr(ctxTable)\n\tc.ContextTables = nullStringToPtr(ctxTables)\n\tc.Archived = intToBool(archived)\n\treturn &c, nil\n}\n\n// CreateBrainChat creates a chat thread.\nfunc (db *DB) CreateBrainChat(username, connectionID, title, providerID, modelID, contextDatabase, contextTable, contextTables string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\tif strings.TrimSpace(title) == \"\" {\n\t\ttitle = \"New Chat\"\n\t}\n\n\tif _, err := db.conn.Exec(\n\t\t`INSERT INTO brain_chats (id, connection_id, username, title, provider_id, model_id, archived, last_message_at, context_database, context_table, context_tables, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, 0, NULL, ?, ?, ?, ?, ?)`,\n\t\tid, connectionID, username, strings.TrimSpace(title), nullableString(providerID), nullableString(modelID), nullableString(contextDatabase), nullableString(contextTable), nullableString(contextTables), now, now,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create brain chat: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateBrainChat updates mutable chat properties.\nfunc (db *DB) UpdateBrainChat(chatID, title, providerID, modelID string, archived bool, contextDatabase, contextTable, contextTables string) error {\n\tnow := 
time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(`UPDATE brain_chats SET title = ?, provider_id = ?, model_id = ?, archived = ?, context_database = ?, context_table = ?, context_tables = ?, updated_at = ? WHERE id = ?`,\n\t\tstrings.TrimSpace(title), nullableString(providerID), nullableString(modelID), boolToInt(archived), nullableString(contextDatabase), nullableString(contextTable), nullableString(contextTables), now, chatID); err != nil {\n\t\treturn fmt.Errorf(\"update brain chat: %w\", err)\n\t}\n\treturn nil\n}\n\n// TouchBrainChat updates last activity timestamp.\nfunc (db *DB) TouchBrainChat(chatID string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(`UPDATE brain_chats SET last_message_at = ?, updated_at = ? WHERE id = ?`, now, now, chatID); err != nil {\n\t\treturn fmt.Errorf(\"touch brain chat: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteBrainChat deletes a chat.\nfunc (db *DB) DeleteBrainChat(chatID string) error {\n\tif _, err := db.conn.Exec(`DELETE FROM brain_chats WHERE id = ?`, chatID); err != nil {\n\t\treturn fmt.Errorf(\"delete brain chat: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetBrainMessages lists all messages in a chat.\nfunc (db *DB) GetBrainMessages(chatID string) ([]BrainMessage, error) {\n\trows, err := db.conn.Query(`SELECT id, chat_id, role, content, status, error, created_at, updated_at FROM brain_messages WHERE chat_id = ? 
ORDER BY created_at ASC`, chatID)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain messages: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tmessages := make([]BrainMessage, 0)\n\tfor rows.Next() {\n\t\tvar m BrainMessage\n\t\tvar msgErr sql.NullString\n\t\tif err := rows.Scan(&m.ID, &m.ChatID, &m.Role, &m.Content, &m.Status, &msgErr, &m.CreatedAt, &m.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan brain message: %w\", err)\n\t\t}\n\t\tm.Error = nullStringToPtr(msgErr)\n\t\tmessages = append(messages, m)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain messages: %w\", err)\n\t}\n\treturn messages, nil\n}\n\n// CreateBrainMessage creates one message.\nfunc (db *DB) CreateBrainMessage(chatID, role, content, status, errorText string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\tif strings.TrimSpace(status) == \"\" {\n\t\tstatus = \"complete\"\n\t}\n\tif _, err := db.conn.Exec(`INSERT INTO brain_messages (id, chat_id, role, content, status, error, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, id, chatID, strings.TrimSpace(role), content, strings.TrimSpace(status), nullableString(errorText), now, now); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create brain message: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateBrainMessage updates generated content/state.\nfunc (db *DB) UpdateBrainMessage(id, content, status, errorText string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := db.conn.Exec(`UPDATE brain_messages SET content = ?, status = ?, error = ?, updated_at = ? 
WHERE id = ?`, content, strings.TrimSpace(status), nullableString(errorText), now, id); err != nil {\n\t\treturn fmt.Errorf(\"update brain message: %w\", err)\n\t}\n\treturn nil\n}\n\n// CreateBrainArtifact stores a generated artifact.\nfunc (db *DB) CreateBrainArtifact(chatID, messageID, artifactType, title, content, createdBy string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\tif _, err := db.conn.Exec(`INSERT INTO brain_artifacts (id, chat_id, message_id, artifact_type, title, content, created_by, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, id, chatID, nullableString(messageID), strings.TrimSpace(artifactType), strings.TrimSpace(title), content, nullableString(createdBy), now); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create brain artifact: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// GetBrainArtifacts lists artifacts for a chat.\nfunc (db *DB) GetBrainArtifacts(chatID string) ([]BrainArtifact, error) {\n\trows, err := db.conn.Query(`SELECT id, chat_id, message_id, artifact_type, title, content, created_by, created_at FROM brain_artifacts WHERE chat_id = ? 
ORDER BY created_at DESC`, chatID)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain artifacts: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tartifacts := make([]BrainArtifact, 0)\n\tfor rows.Next() {\n\t\tvar a BrainArtifact\n\t\tvar messageID, createdBy sql.NullString\n\t\tif err := rows.Scan(&a.ID, &a.ChatID, &messageID, &a.Type, &a.Title, &a.Content, &createdBy, &a.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan brain artifact: %w\", err)\n\t\t}\n\t\ta.MessageID = nullStringToPtr(messageID)\n\t\ta.CreatedBy = nullStringToPtr(createdBy)\n\t\tartifacts = append(artifacts, a)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain artifacts: %w\", err)\n\t}\n\treturn artifacts, nil\n}\n\n// CreateBrainToolCall stores a tool execution trace.\nfunc (db *DB) CreateBrainToolCall(chatID, messageID, toolName, inputJSON, outputJSON, status, errorText string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\tif _, err := db.conn.Exec(`INSERT INTO brain_tool_calls (id, chat_id, message_id, tool_name, input_json, output_json, status, error, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`, id, chatID, messageID, toolName, inputJSON, outputJSON, status, nullableString(errorText), now); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create brain tool call: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// GetBrainModelsWithProvider returns active models and provider metadata for UI pickers.\nfunc (db *DB) GetBrainModelsWithProvider(activeOnly bool) ([]map[string]interface{}, error) {\n\tquery := `\n\t\tSELECT\n\t\t\tm.id,\n\t\t\tm.name,\n\t\t\tCOALESCE(m.display_name, ''),\n\t\t\tm.provider_id,\n\t\t\tp.name,\n\t\t\tp.kind,\n\t\t\tm.is_active,\n\t\t\tm.is_default,\n\t\t\tp.is_active,\n\t\t\tp.is_default\n\t\tFROM brain_models m\n\t\tJOIN brain_providers p ON p.id = m.provider_id\n\t`\n\tif activeOnly {\n\t\tquery += ` WHERE m.is_active = 1 AND p.is_active = 1`\n\t}\n\tquery += ` ORDER BY 
m.is_default DESC, p.is_default DESC, p.name ASC, m.name ASC`\n\n\trows, err := db.conn.Query(query)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get brain models with provider: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\titems := make([]map[string]interface{}, 0)\n\tfor rows.Next() {\n\t\tvar modelID, modelName, display, providerID, providerName, providerKind string\n\t\tvar modelActive, modelDefault, providerActive, providerDefault int\n\t\tif err := rows.Scan(&modelID, &modelName, &display, &providerID, &providerName, &providerKind, &modelActive, &modelDefault, &providerActive, &providerDefault); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan brain model picker row: %w\", err)\n\t\t}\n\t\titems = append(items, map[string]interface{}{\n\t\t\t\"id\":               modelID,\n\t\t\t\"name\":             modelName,\n\t\t\t\"display_name\":     display,\n\t\t\t\"provider_id\":      providerID,\n\t\t\t\"provider_name\":    providerName,\n\t\t\t\"provider_kind\":    providerKind,\n\t\t\t\"is_active\":        intToBool(modelActive),\n\t\t\t\"is_default\":       intToBool(modelDefault),\n\t\t\t\"provider_active\":  intToBool(providerActive),\n\t\t\t\"provider_default\": intToBool(providerDefault),\n\t\t})\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate brain model picker rows: %w\", err)\n\t}\n\treturn items, nil\n}\n"
  },
  {
    "path": "internal/database/cleanup.go",
    "content": "package database\n\nimport (\n\t\"log/slog\"\n\t\"time\"\n)\n\n// StartCleanupJobs launches background goroutines that periodically clean up\n// expired sessions and expired rate limits.\nfunc (db *DB) StartCleanupJobs() {\n\tslog.Info(\"Starting periodic cleanup jobs...\")\n\n\t// Cleanup expired sessions (every 1 hour)\n\tgo func() {\n\t\tticker := time.NewTicker(1 * time.Hour)\n\t\tdefer ticker.Stop()\n\t\tfor range ticker.C {\n\t\t\tdb.cleanupExpiredSessions()\n\t\t}\n\t}()\n\n\t// Cleanup expired rate limits (every 10 minutes)\n\tgo func() {\n\t\tticker := time.NewTicker(10 * time.Minute)\n\t\tdefer ticker.Stop()\n\t\tfor range ticker.C {\n\t\t\tdb.cleanupRateLimits()\n\t\t}\n\t}()\n\n\tslog.Info(\"Cleanup jobs scheduled\")\n}\n\n// cleanupExpiredSessions removes sessions that have passed their expiration time.\nfunc (db *DB) cleanupExpiredSessions() {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tresult, err := db.conn.Exec(\"DELETE FROM sessions WHERE expires_at < ?\", now)\n\tif err != nil {\n\t\tslog.Error(\"Failed to cleanup expired sessions\", \"error\", err)\n\t\treturn\n\t}\n\tif affected, _ := result.RowsAffected(); affected > 0 {\n\t\tslog.Info(\"Cleaned up expired sessions\", \"count\", affected)\n\t}\n}\n\n// cleanupRateLimits removes expired rate limit entries (15-minute window).\nfunc (db *DB) cleanupRateLimits() {\n\tconst windowMs int64 = 15 * 60 * 1000 // 15 minutes\n\tcleaned, err := db.CleanupExpiredRateLimits(windowMs)\n\tif err != nil {\n\t\tslog.Error(\"Failed to cleanup rate limits\", \"error\", err)\n\t\treturn\n\t}\n\tif cleaned > 0 {\n\t\tslog.Info(\"Cleaned up expired rate limits\", \"count\", cleaned)\n\t}\n}\n"
  },
  {
    "path": "internal/database/connections.go",
    "content": "package database\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Connection represents a connection record (agent or embedded).\ntype Connection struct {\n\tID           string  `json:\"id\"`\n\tName         string  `json:\"name\"`\n\tTunnelToken  string  `json:\"tunnel_token\"`\n\tIsEmbedded   bool    `json:\"is_embedded\"`\n\tStatus       string  `json:\"status\"`\n\tLastSeenAt   *string `json:\"last_seen_at\"`\n\tHostInfoJSON *string `json:\"host_info\"`\n\tCreatedAt    string  `json:\"created_at\"`\n}\n\n// HostInfo represents the host machine metrics reported by the tunnel agent.\ntype HostInfo struct {\n\tHostname    string  `json:\"hostname\"`\n\tOS          string  `json:\"os\"`\n\tArch        string  `json:\"arch\"`\n\tCPUCores    int     `json:\"cpu_cores\"`\n\tMemoryTotal int64   `json:\"memory_total\"`\n\tMemoryFree  int64   `json:\"memory_free\"`\n\tDiskTotal   int64   `json:\"disk_total\"`\n\tDiskFree    int64   `json:\"disk_free\"`\n\tGoVersion   string  `json:\"go_version\"`\n\tAgentUptime float64 `json:\"agent_uptime\"`\n\tCollectedAt string  `json:\"collected_at\"`\n}\n\n// GetConnections retrieves all connections ordered by creation date.\nfunc (db *DB) GetConnections() ([]Connection, error) {\n\trows, err := db.conn.Query(\n\t\t\"SELECT id, name, tunnel_token, is_embedded, status, last_seen_at, host_info, created_at FROM connections ORDER BY created_at ASC\",\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get connections: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar conns []Connection\n\tfor rows.Next() {\n\t\tvar c Connection\n\t\tvar lastSeenAt, hostInfo sql.NullString\n\t\tvar isEmbedded int\n\t\tif err := rows.Scan(&c.ID, &c.Name, &c.TunnelToken, &isEmbedded, &c.Status, &lastSeenAt, &hostInfo, &c.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan connection: %w\", err)\n\t\t}\n\t\tc.IsEmbedded = isEmbedded == 
1\n\t\tc.LastSeenAt = nullStringToPtr(lastSeenAt)\n\t\tc.HostInfoJSON = nullStringToPtr(hostInfo)\n\t\tconns = append(conns, c)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate connection rows: %w\", err)\n\t}\n\treturn conns, nil\n}\n\n// GetConnectionByToken retrieves a connection by its tunnel token.\nfunc (db *DB) GetConnectionByToken(token string) (*Connection, error) {\n\trow := db.conn.QueryRow(\n\t\t\"SELECT id, name, tunnel_token, is_embedded, status, last_seen_at, host_info, created_at FROM connections WHERE tunnel_token = ?\",\n\t\ttoken,\n\t)\n\n\tvar c Connection\n\tvar lastSeenAt, hostInfo sql.NullString\n\tvar isEmbedded int\n\n\terr := row.Scan(\n\t\t&c.ID, &c.Name, &c.TunnelToken, &isEmbedded, &c.Status,\n\t\t&lastSeenAt, &hostInfo, &c.CreatedAt,\n\t)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get connection by token: %w\", err)\n\t}\n\n\tc.IsEmbedded = isEmbedded == 1\n\tc.LastSeenAt = nullStringToPtr(lastSeenAt)\n\tc.HostInfoJSON = nullStringToPtr(hostInfo)\n\treturn &c, nil\n}\n\n// GetConnectionByTokenCtx retrieves a connection by its tunnel token using a context.\n// This is used by tunnel auth to avoid hanging while SQLite is busy.\nfunc (db *DB) GetConnectionByTokenCtx(ctx context.Context, token string) (*Connection, error) {\n\trow := db.conn.QueryRowContext(ctx,\n\t\t\"SELECT id, name, tunnel_token, is_embedded, status, last_seen_at, host_info, created_at FROM connections WHERE tunnel_token = ?\",\n\t\ttoken,\n\t)\n\n\tvar c Connection\n\tvar lastSeenAt, hostInfo sql.NullString\n\tvar isEmbedded int\n\n\terr := row.Scan(\n\t\t&c.ID, &c.Name, &c.TunnelToken, &isEmbedded, &c.Status,\n\t\t&lastSeenAt, &hostInfo, &c.CreatedAt,\n\t)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get connection by token: %w\", err)\n\t}\n\n\tc.IsEmbedded = isEmbedded == 1\n\tc.LastSeenAt = 
nullStringToPtr(lastSeenAt)\n\tc.HostInfoJSON = nullStringToPtr(hostInfo)\n\treturn &c, nil\n}\n\n// GetConnectionByID retrieves a connection by its ID.\nfunc (db *DB) GetConnectionByID(id string) (*Connection, error) {\n\trow := db.conn.QueryRow(\n\t\t\"SELECT id, name, tunnel_token, is_embedded, status, last_seen_at, host_info, created_at FROM connections WHERE id = ?\", id,\n\t)\n\n\tvar c Connection\n\tvar lastSeenAt, hostInfo sql.NullString\n\tvar isEmbedded int\n\n\terr := row.Scan(&c.ID, &c.Name, &c.TunnelToken, &isEmbedded, &c.Status, &lastSeenAt, &hostInfo, &c.CreatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get connection by id: %w\", err)\n\t}\n\n\tc.IsEmbedded = isEmbedded == 1\n\tc.LastSeenAt = nullStringToPtr(lastSeenAt)\n\tc.HostInfoJSON = nullStringToPtr(hostInfo)\n\treturn &c, nil\n}\n\n// GetConnectionCount returns the total number of connections.\nfunc (db *DB) GetConnectionCount() (int, error) {\n\tvar count int\n\terr := db.conn.QueryRow(\"SELECT COUNT(*) FROM connections\").Scan(&count)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"get connection count: %w\", err)\n\t}\n\treturn count, nil\n}\n\n// CreateConnection creates a new connection and returns its ID.\nfunc (db *DB) CreateConnection(name, token string, isEmbedded bool) (string, error) {\n\tid := uuid.NewString()\n\tembedded := 0\n\tif isEmbedded {\n\t\tembedded = 1\n\t}\n\t_, err := db.conn.Exec(\n\t\t\"INSERT INTO connections (id, name, tunnel_token, is_embedded) VALUES (?, ?, ?, ?)\",\n\t\tid, name, token, embedded,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create connection: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateConnectionStatus updates the status and last_seen_at of a connection.\nfunc (db *DB) UpdateConnectionStatus(id, status string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := db.conn.Exec(\n\t\t\"UPDATE connections SET status = ?, last_seen_at = ? 
WHERE id = ?\",\n\t\tstatus, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update connection status: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteConnection deletes a connection by its ID.\nfunc (db *DB) DeleteConnection(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM connections WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete connection: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpdateConnectionToken updates the tunnel token for a connection.\nfunc (db *DB) UpdateConnectionToken(id, newToken string) error {\n\t_, err := db.conn.Exec(\"UPDATE connections SET tunnel_token = ? WHERE id = ?\", newToken, id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update connection token: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpdateConnectionName updates the display name for a connection.\nfunc (db *DB) UpdateConnectionName(id, newName string) error {\n\t_, err := db.conn.Exec(\"UPDATE connections SET name = ? WHERE id = ?\", newName, id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update connection name: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpdateConnectionHostInfo stores the host info JSON for a connection.\nfunc (db *DB) UpdateConnectionHostInfo(connId string, info HostInfo) error {\n\tdata, err := json.Marshal(info)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"marshal host info: %w\", err)\n\t}\n\t_, err = db.conn.Exec(\n\t\t\"UPDATE connections SET host_info = ? 
WHERE id = ?\",\n\t\tstring(data), connId,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update connection host info: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetConnectionHostInfo retrieves the parsed host info for a connection.\nfunc (db *DB) GetConnectionHostInfo(connId string) (*HostInfo, error) {\n\tvar hostInfoStr sql.NullString\n\terr := db.conn.QueryRow(\n\t\t\"SELECT host_info FROM connections WHERE id = ?\", connId,\n\t).Scan(&hostInfoStr)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get connection host info: %w\", err)\n\t}\n\n\tif !hostInfoStr.Valid || hostInfoStr.String == \"\" {\n\t\treturn nil, nil\n\t}\n\n\tvar info HostInfo\n\tif err := json.Unmarshal([]byte(hostInfoStr.String), &info); err != nil {\n\t\treturn nil, nil\n\t}\n\treturn &info, nil\n}\n\n// GetEmbeddedConnection retrieves the embedded connection (if any).\nfunc (db *DB) GetEmbeddedConnection() (*Connection, error) {\n\trow := db.conn.QueryRow(\n\t\t\"SELECT id, name, tunnel_token, is_embedded, status, last_seen_at, host_info, created_at FROM connections WHERE is_embedded = 1 LIMIT 1\",\n\t)\n\n\tvar c Connection\n\tvar lastSeenAt, hostInfo sql.NullString\n\tvar isEmbedded int\n\n\terr := row.Scan(&c.ID, &c.Name, &c.TunnelToken, &isEmbedded, &c.Status, &lastSeenAt, &hostInfo, &c.CreatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get embedded connection: %w\", err)\n\t}\n\n\tc.IsEmbedded = isEmbedded == 1\n\tc.LastSeenAt = nullStringToPtr(lastSeenAt)\n\tc.HostInfoJSON = nullStringToPtr(hostInfo)\n\treturn &c, nil\n}\n"
  },
  {
    "path": "internal/database/dashboards.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\nconst (\n\tsystemDashboardName        = \"System Overview\"\n\tsystemDashboardDescription = \"Built-in operational dashboard for ClickHouse health and query performance.\"\n\tsystemDashboardCreatedBy   = \"system\"\n)\n\n// Dashboard represents a dashboard record.\ntype Dashboard struct {\n\tID          string  `json:\"id\"`\n\tName        string  `json:\"name\"`\n\tDescription *string `json:\"description\"`\n\tCreatedBy   *string `json:\"created_by\"`\n\tCreatedAt   string  `json:\"created_at\"`\n\tUpdatedAt   string  `json:\"updated_at\"`\n}\n\n// Panel represents a dashboard panel.\ntype Panel struct {\n\tID           string  `json:\"id\"`\n\tDashboardID  string  `json:\"dashboard_id\"`\n\tName         string  `json:\"name\"`\n\tPanelType    string  `json:\"panel_type\"`\n\tQuery        string  `json:\"query\"`\n\tConnectionID *string `json:\"connection_id\"`\n\tConfig       string  `json:\"config\"`\n\tLayoutX      int     `json:\"layout_x\"`\n\tLayoutY      int     `json:\"layout_y\"`\n\tLayoutW      int     `json:\"layout_w\"`\n\tLayoutH      int     `json:\"layout_h\"`\n\tCreatedAt    string  `json:\"created_at\"`\n\tUpdatedAt    string  `json:\"updated_at\"`\n}\n\n// GetDashboards retrieves all dashboards.\nfunc (db *DB) GetDashboards() ([]Dashboard, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, description, created_by, created_at, updated_at\n\t\t FROM dashboards ORDER BY updated_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get dashboards: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar dashboards []Dashboard\n\tfor rows.Next() {\n\t\tvar d Dashboard\n\t\tvar desc, createdBy sql.NullString\n\t\tif err := rows.Scan(&d.ID, &d.Name, &desc, &createdBy, &d.CreatedAt, &d.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan dashboard: %w\", err)\n\t\t}\n\t\td.Description = 
nullStringToPtr(desc)\n\t\td.CreatedBy = nullStringToPtr(createdBy)\n\t\tdashboards = append(dashboards, d)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate dashboard rows: %w\", err)\n\t}\n\treturn dashboards, nil\n}\n\n// GetDashboardByID retrieves a dashboard by ID.\nfunc (db *DB) GetDashboardByID(id string) (*Dashboard, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, description, created_by, created_at, updated_at\n\t\t FROM dashboards WHERE id = ?`, id,\n\t)\n\n\tvar d Dashboard\n\tvar desc, createdBy sql.NullString\n\terr := row.Scan(&d.ID, &d.Name, &desc, &createdBy, &d.CreatedAt, &d.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get dashboard by id: %w\", err)\n\t}\n\td.Description = nullStringToPtr(desc)\n\td.CreatedBy = nullStringToPtr(createdBy)\n\treturn &d, nil\n}\n\n// CreateDashboard creates a new dashboard and returns its ID.\nfunc (db *DB) CreateDashboard(name, description, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc, creator interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\tif createdBy != \"\" {\n\t\tcreator = createdBy\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO dashboards (id, name, description, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?)`,\n\t\tid, name, desc, creator, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create dashboard: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateDashboard updates a dashboard's name and description.\nfunc (db *DB) UpdateDashboard(id, name, description string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t\"UPDATE dashboards SET name = ?, description = ?, updated_at = ? 
WHERE id = ?\",\n\t\tname, desc, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update dashboard: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteDashboard deletes a dashboard and all its panels (cascade).\nfunc (db *DB) DeleteDashboard(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM dashboards WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete dashboard: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetPanelsByDashboard retrieves all panels for a dashboard.\nfunc (db *DB) GetPanelsByDashboard(dashboardID string) ([]Panel, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, dashboard_id, name, panel_type, query, connection_id, config, layout_x, layout_y, layout_w, layout_h, created_at, updated_at\n\t\t FROM panels WHERE dashboard_id = ? ORDER BY layout_y ASC, layout_x ASC`,\n\t\tdashboardID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get panels by dashboard: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar panels []Panel\n\tfor rows.Next() {\n\t\tvar p Panel\n\t\tvar connID sql.NullString\n\t\tif err := rows.Scan(&p.ID, &p.DashboardID, &p.Name, &p.PanelType, &p.Query, &connID, &p.Config, &p.LayoutX, &p.LayoutY, &p.LayoutW, &p.LayoutH, &p.CreatedAt, &p.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan panel: %w\", err)\n\t\t}\n\t\tp.ConnectionID = nullStringToPtr(connID)\n\t\tpanels = append(panels, p)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate panel rows: %w\", err)\n\t}\n\treturn panels, nil\n}\n\n// GetPanelByID retrieves a panel by ID.\nfunc (db *DB) GetPanelByID(id string) (*Panel, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, dashboard_id, name, panel_type, query, connection_id, config, layout_x, layout_y, layout_w, layout_h, created_at, updated_at\n\t\t FROM panels WHERE id = ?`, id,\n\t)\n\n\tvar p Panel\n\tvar connID sql.NullString\n\terr := row.Scan(&p.ID, &p.DashboardID, &p.Name, &p.PanelType, &p.Query, &connID, &p.Config, &p.LayoutX, &p.LayoutY, 
&p.LayoutW, &p.LayoutH, &p.CreatedAt, &p.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get panel by id: %w\", err)\n\t}\n\tp.ConnectionID = nullStringToPtr(connID)\n\treturn &p, nil\n}\n\n// CreatePanel creates a new panel and returns its ID.\nfunc (db *DB) CreatePanel(dashboardID, name, panelType, query, connectionID, config string, x, y, w, h int) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar connID interface{}\n\tif connectionID != \"\" {\n\t\tconnID = connectionID\n\t}\n\tif config == \"\" {\n\t\tconfig = \"{}\"\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO panels (id, dashboard_id, name, panel_type, query, connection_id, config, layout_x, layout_y, layout_w, layout_h, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, dashboardID, name, panelType, query, connID, config, x, y, w, h, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create panel: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdatePanel updates a panel.\nfunc (db *DB) UpdatePanel(id, name, panelType, query, connectionID, config string, x, y, w, h int) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar connID interface{}\n\tif connectionID != \"\" {\n\t\tconnID = connectionID\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE panels SET name = ?, panel_type = ?, query = ?, connection_id = ?, config = ?, layout_x = ?, layout_y = ?, layout_w = ?, layout_h = ?, updated_at = ? 
WHERE id = ?`,\n\t\tname, panelType, query, connID, config, x, y, w, h, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update panel: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeletePanel deletes a panel by ID.\nfunc (db *DB) DeletePanel(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM panels WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete panel: %w\", err)\n\t}\n\treturn nil\n}\n\ntype seededPanel struct {\n\tName      string\n\tPanelType string\n\tQuery     string\n\tConfig    string\n\tX         int\n\tY         int\n\tW         int\n\tH         int\n}\n\n// EnsureSystemOverviewDashboard creates or updates a built-in default dashboard\n// with operational ClickHouse metrics.\nfunc (db *DB) EnsureSystemOverviewDashboard() error {\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin system dashboard transaction: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tdashboardID := \"\"\n\n\tif err := tx.QueryRow(\n\t\t`SELECT id FROM dashboards WHERE created_by = ? 
LIMIT 1`,\n\t\tsystemDashboardCreatedBy,\n\t).Scan(&dashboardID); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\tdashboardID = uuid.NewString()\n\t\t\tif _, err := tx.Exec(\n\t\t\t\t`INSERT INTO dashboards (id, name, description, created_by, created_at, updated_at)\n\t\t\t\t VALUES (?, ?, ?, ?, ?, ?)`,\n\t\t\t\tdashboardID,\n\t\t\t\tsystemDashboardName,\n\t\t\t\tsystemDashboardDescription,\n\t\t\t\tsystemDashboardCreatedBy,\n\t\t\t\tnow,\n\t\t\t\tnow,\n\t\t\t); err != nil {\n\t\t\t\treturn fmt.Errorf(\"insert system dashboard: %w\", err)\n\t\t\t}\n\t\t} else {\n\t\t\treturn fmt.Errorf(\"get system dashboard: %w\", err)\n\t\t}\n\t} else {\n\t\tif _, err := tx.Exec(\n\t\t\t`UPDATE dashboards\n\t\t\t SET name = ?, description = ?, updated_at = ?\n\t\t\t WHERE id = ?`,\n\t\t\tsystemDashboardName,\n\t\t\tsystemDashboardDescription,\n\t\t\tnow,\n\t\t\tdashboardID,\n\t\t); err != nil {\n\t\t\treturn fmt.Errorf(\"update system dashboard metadata: %w\", err)\n\t\t}\n\t}\n\n\tpanels := []seededPanel{\n\t\t{\n\t\t\tName:      \"ClickHouse Version\",\n\t\t\tPanelType: \"stat\",\n\t\t\tQuery:     `SELECT version() AS version`,\n\t\t\tConfig:    `{\"chartType\":\"stat\"}`,\n\t\t\tX:         0, Y: 0, W: 2, H: 3,\n\t\t},\n\t\t{\n\t\t\tName:      \"Uptime (seconds)\",\n\t\t\tPanelType: \"stat\",\n\t\t\tQuery: `SELECT toUInt64(anyIf(value, metric = 'Uptime')) AS uptime_sec\nFROM system.asynchronous_metrics`,\n\t\t\tConfig: `{\"chartType\":\"stat\"}`,\n\t\t\tX:      2, Y: 0, W: 2, H: 3,\n\t\t},\n\t\t{\n\t\t\tName:      \"Active Queries\",\n\t\t\tPanelType: \"stat\",\n\t\t\tQuery:     `SELECT count() AS active_queries FROM system.processes`,\n\t\t\tConfig:    `{\"chartType\":\"stat\"}`,\n\t\t\tX:         4, Y: 0, W: 2, H: 3,\n\t\t},\n\t\t{\n\t\t\tName:      \"Connected Users\",\n\t\t\tPanelType: \"stat\",\n\t\t\tQuery:     `SELECT uniq(user) AS connected_users FROM system.processes`,\n\t\t\tConfig:    `{\"chartType\":\"stat\"}`,\n\t\t\tX:         6, Y: 0, W: 2, H: 
3,\n\t\t},\n\t\t{\n\t\t\tName:      \"Databases\",\n\t\t\tPanelType: \"stat\",\n\t\t\tQuery:     `SELECT count() AS databases FROM system.databases`,\n\t\t\tConfig:    `{\"chartType\":\"stat\"}`,\n\t\t\tX:         8, Y: 0, W: 2, H: 3,\n\t\t},\n\t\t{\n\t\t\tName:      \"Tables\",\n\t\t\tPanelType: \"stat\",\n\t\t\tQuery:     `SELECT count() AS tables FROM system.tables WHERE database NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')`,\n\t\t\tConfig:    `{\"chartType\":\"stat\"}`,\n\t\t\tX:         10, Y: 0, W: 2, H: 3,\n\t\t},\n\t\t{\n\t\t\tName:      \"Queries / interval\",\n\t\t\tPanelType: \"timeseries\",\n\t\t\tQuery: `SELECT\n  toStartOfInterval(event_time, INTERVAL $__interval second) AS ts,\n  count() AS queries\nFROM system.query_log\nWHERE type = 'QueryFinish'\n  AND $__timestamp(event_time)\nGROUP BY ts\nORDER BY ts`,\n\t\t\tConfig: `{\"chartType\":\"timeseries\",\"xColumn\":\"ts\",\"yColumns\":[\"queries\"],\"colors\":[\"#F97316\"],\"legendPosition\":\"bottom\"}`,\n\t\t\tX:      0, Y: 3, W: 4, H: 5,\n\t\t},\n\t\t{\n\t\t\tName:      \"P95 Query Latency (ms) / interval\",\n\t\t\tPanelType: \"timeseries\",\n\t\t\tQuery: `SELECT\n  toStartOfInterval(event_time, INTERVAL $__interval second) AS ts,\n  round(quantile(0.95)(query_duration_ms), 2) AS p95_ms\nFROM system.query_log\nWHERE type = 'QueryFinish'\n  AND $__timestamp(event_time)\nGROUP BY ts\nORDER BY ts`,\n\t\t\tConfig: `{\"chartType\":\"timeseries\",\"xColumn\":\"ts\",\"yColumns\":[\"p95_ms\"],\"colors\":[\"#EF4444\"],\"legendPosition\":\"bottom\"}`,\n\t\t\tX:      4, Y: 3, W: 4, H: 5,\n\t\t},\n\t\t{\n\t\t\tName:      \"Read MB / interval\",\n\t\t\tPanelType: \"timeseries\",\n\t\t\tQuery: `SELECT\n  toStartOfInterval(event_time, INTERVAL $__interval second) AS ts,\n  round(sum(read_bytes) / 1048576, 2) AS read_mb\nFROM system.query_log\nWHERE type = 'QueryFinish'\n  AND $__timestamp(event_time)\nGROUP BY ts\nORDER BY ts`,\n\t\t\tConfig: 
`{\"chartType\":\"timeseries\",\"xColumn\":\"ts\",\"yColumns\":[\"read_mb\"],\"colors\":[\"#10B981\"],\"legendPosition\":\"bottom\"}`,\n\t\t\tX:      8, Y: 3, W: 4, H: 5,\n\t\t},\n\t\t{\n\t\t\tName:      \"Top Slow Queries\",\n\t\t\tPanelType: \"table\",\n\t\t\tQuery: `SELECT\n  query_id,\n  user,\n  query_duration_ms,\n  read_rows,\n  formatReadableSize(read_bytes) AS read_size,\n  left(query, 120) AS sample_query\nFROM system.query_log\nWHERE type = 'QueryFinish'\n  AND $__timestamp(event_time)\nORDER BY query_duration_ms DESC\nLIMIT 50`,\n\t\t\tConfig: `{\"chartType\":\"table\"}`,\n\t\t\tX:      0, Y: 8, W: 7, H: 6,\n\t\t},\n\t\t{\n\t\t\tName:      \"Top Tables by Size\",\n\t\t\tPanelType: \"table\",\n\t\t\tQuery: `SELECT\n  concat(database, '.', table) AS table_name,\n  round(sum(bytes_on_disk) / 1048576, 2) AS size_mb,\n  sum(rows) AS rows\nFROM system.parts\nWHERE active\nGROUP BY table_name\nORDER BY size_mb DESC\nLIMIT 50`,\n\t\t\tConfig: `{\"chartType\":\"table\"}`,\n\t\t\tX:      7, Y: 8, W: 5, H: 6,\n\t\t},\n\t\t{\n\t\t\tName:      \"Disk Space\",\n\t\t\tPanelType: \"table\",\n\t\t\tQuery: `SELECT\n  name,\n  path,\n  round(total_space / 1073741824, 2) AS total_gb,\n  round(free_space / 1073741824, 2) AS free_gb,\n  round((total_space - free_space) / 1073741824, 2) AS used_gb\nFROM system.disks\nORDER BY used_gb DESC`,\n\t\t\tConfig: `{\"chartType\":\"table\"}`,\n\t\t\tX:      0, Y: 14, W: 6, H: 5,\n\t\t},\n\t\t{\n\t\t\tName:      \"Background Operations\",\n\t\t\tPanelType: \"table\",\n\t\t\tQuery: `SELECT 'merges_running' AS metric, count() AS value FROM system.merges\nUNION ALL\nSELECT 'mutations_pending' AS metric, countIf(is_done = 0) AS value FROM system.mutations\nUNION ALL\nSELECT 'replication_queue' AS metric, sum(queue_size) AS value FROM system.replicas`,\n\t\t\tConfig: `{\"chartType\":\"table\"}`,\n\t\t\tX:      6, Y: 14, W: 6, H: 5,\n\t\t},\n\t}\n\n\texisting := map[string]string{}\n\trows, err := tx.Query(`SELECT id, name FROM panels WHERE 
dashboard_id = ?`, dashboardID)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"list existing system panels: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tvar panelID, name string\n\t\tif err := rows.Scan(&panelID, &name); err != nil {\n\t\t\treturn fmt.Errorf(\"scan existing system panel: %w\", err)\n\t\t}\n\t\texisting[name] = panelID\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn fmt.Errorf(\"iterate existing system panels: %w\", err)\n\t}\n\n\tfor _, p := range panels {\n\t\tif panelID, ok := existing[p.Name]; ok {\n\t\t\tif _, err := tx.Exec(\n\t\t\t\t`UPDATE panels\n\t\t\t\t SET panel_type = ?, query = ?, connection_id = NULL, config = ?,\n\t\t\t\t     layout_x = ?, layout_y = ?, layout_w = ?, layout_h = ?, updated_at = ?\n\t\t\t\t WHERE id = ?`,\n\t\t\t\tp.PanelType,\n\t\t\t\tp.Query,\n\t\t\t\tp.Config,\n\t\t\t\tp.X,\n\t\t\t\tp.Y,\n\t\t\t\tp.W,\n\t\t\t\tp.H,\n\t\t\t\tnow,\n\t\t\t\tpanelID,\n\t\t\t); err != nil {\n\t\t\t\treturn fmt.Errorf(\"update system panel %q: %w\", p.Name, err)\n\t\t\t}\n\t\t} else {\n\t\t\tif _, err := tx.Exec(\n\t\t\t\t`INSERT INTO panels (\n\t\t\t\t\tid, dashboard_id, name, panel_type, query, connection_id, config,\n\t\t\t\t\tlayout_x, layout_y, layout_w, layout_h, created_at, updated_at\n\t\t\t\t)\n\t\t\t\tVALUES (?, ?, ?, ?, ?, NULL, ?, ?, ?, ?, ?, ?, ?)`,\n\t\t\t\tuuid.NewString(),\n\t\t\t\tdashboardID,\n\t\t\t\tp.Name,\n\t\t\t\tp.PanelType,\n\t\t\t\tp.Query,\n\t\t\t\tp.Config,\n\t\t\t\tp.X,\n\t\t\t\tp.Y,\n\t\t\t\tp.W,\n\t\t\t\tp.H,\n\t\t\t\tnow,\n\t\t\t\tnow,\n\t\t\t); err != nil {\n\t\t\t\treturn fmt.Errorf(\"insert system panel %q: %w\", p.Name, err)\n\t\t\t}\n\t\t}\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit system dashboard seed: %w\", err)\n\t}\n\n\treturn nil\n}\n"
  },
  {
    "path": "internal/database/database.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"strconv\"\n\n\t_ \"modernc.org/sqlite\"\n)\n\n// nullStringToPtr converts a sql.NullString to a *string (nil if not valid).\nfunc nullStringToPtr(ns sql.NullString) *string {\n\tif ns.Valid {\n\t\treturn &ns.String\n\t}\n\treturn nil\n}\n\n// DB wraps the SQLite connection.\ntype DB struct {\n\tconn *sql.DB\n\tpath string\n}\n\n// Open opens the SQLite database at the given path, runs migrations, and returns a DB.\nfunc Open(path string) (*DB, error) {\n\t// Ensure directory exists\n\tdir := filepath.Dir(path)\n\tif dir != \"\" && dir != \".\" {\n\t\tif err := os.MkdirAll(dir, 0755); err != nil {\n\t\t\tslog.Warn(\"Could not create database directory\", \"dir\", dir, \"error\", err)\n\t\t}\n\t}\n\n\tdsn := fmt.Sprintf(\"%s?_pragma=foreign_keys(1)&_pragma=journal_mode(wal)&_pragma=busy_timeout(5000)\", path)\n\tconn, err := sql.Open(\"sqlite\", dsn)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"open sqlite: %w\", err)\n\t}\n\n\t// SQLite is single-writer, but WAL allows concurrent readers.\n\t// Keep a small pool so reads (session/token checks) are not blocked by long sync writes.\n\tmaxOpenConns := 8\n\tif raw := os.Getenv(\"CHUI_SQLITE_MAX_OPEN_CONNS\"); raw != \"\" {\n\t\tif parsed, parseErr := strconv.Atoi(raw); parseErr == nil && parsed > 0 {\n\t\t\tmaxOpenConns = parsed\n\t\t}\n\t}\n\tconn.SetMaxOpenConns(maxOpenConns)\n\tconn.SetMaxIdleConns(maxOpenConns)\n\n\t// Verify connection\n\tif err := conn.Ping(); err != nil {\n\t\tconn.Close()\n\t\treturn nil, fmt.Errorf(\"ping sqlite: %w\", err)\n\t}\n\n\tdb := &DB{conn: conn, path: path}\n\n\t// Run migrations\n\tif err := db.runMigrations(); err != nil {\n\t\tconn.Close()\n\t\treturn nil, fmt.Errorf(\"migrations: %w\", err)\n\t}\n\n\tslog.Info(\"Database initialized\", \"path\", path)\n\treturn db, nil\n}\n\n// Close closes the database connection.\nfunc (db *DB) Close() error 
{\n\treturn db.conn.Close()\n}\n\n// Conn returns the underlying sql.DB for advanced usage.\nfunc (db *DB) Conn() *sql.DB {\n\treturn db.conn\n}\n"
  },
  {
    "path": "internal/database/migrations.go",
    "content": "package database\n\nimport (\n\t\"fmt\"\n\t\"log/slog\"\n\t\"strings\"\n\n\t\"github.com/google/uuid\"\n)\n\nfunc (db *DB) runMigrations() error {\n\tslog.Info(\"Running database migrations...\")\n\n\tstmts := []string{\n\t\t// Installation settings (key-value store)\n\t\t`CREATE TABLE IF NOT EXISTS settings (\n\t\t\tkey TEXT PRIMARY KEY,\n\t\t\tvalue TEXT NOT NULL,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\n\t\t// Connections (replaces tunnel_connections, no org_id)\n\t\t`CREATE TABLE IF NOT EXISTS connections (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\ttunnel_token TEXT UNIQUE NOT NULL,\n\t\t\tis_embedded INTEGER DEFAULT 0,\n\t\t\tstatus TEXT DEFAULT 'disconnected',\n\t\t\tlast_seen_at TEXT,\n\t\t\thost_info TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE UNIQUE INDEX IF NOT EXISTS idx_conn_token ON connections(tunnel_token)`,\n\n\t\t// Sessions (no org_id)\n\t\t`CREATE TABLE IF NOT EXISTS sessions (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tclickhouse_user TEXT NOT NULL,\n\t\t\tencrypted_password TEXT NOT NULL,\n\t\t\ttoken TEXT UNIQUE NOT NULL,\n\t\t\texpires_at TEXT NOT NULL,\n\t\t\tuser_role TEXT DEFAULT 'viewer',\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE UNIQUE INDEX IF NOT EXISTS idx_session_token ON sessions(token)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_session_conn ON sessions(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_session_expires ON sessions(expires_at)`,\n\n\t\t// Rate limits\n\t\t`CREATE TABLE IF NOT EXISTS rate_limits (\n\t\t\tidentifier TEXT PRIMARY KEY,\n\t\t\ttype TEXT NOT NULL,\n\t\t\tattempts INTEGER NOT NULL DEFAULT 0,\n\t\t\tfirst_attempt_at TEXT NOT NULL,\n\t\t\tlocked_until TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_rate_limit_type ON 
rate_limits(type)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_rate_limit_locked ON rate_limits(locked_until)`,\n\n\t\t// User role overrides\n\t\t`CREATE TABLE IF NOT EXISTS user_roles (\n\t\t\tusername TEXT PRIMARY KEY,\n\t\t\trole TEXT NOT NULL DEFAULT 'viewer',\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\n\t\t// Saved queries (was in ClickHouse, now SQLite)\n\t\t`CREATE TABLE IF NOT EXISTS saved_queries (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tdescription TEXT,\n\t\t\tquery TEXT NOT NULL,\n\t\t\tconnection_id TEXT REFERENCES connections(id) ON DELETE SET NULL,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\n\t\t// Dashboards (was in ClickHouse, now SQLite)\n\t\t`CREATE TABLE IF NOT EXISTS dashboards (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tdescription TEXT,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\n\t\t// Dashboard panels\n\t\t`CREATE TABLE IF NOT EXISTS panels (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tdashboard_id TEXT NOT NULL REFERENCES dashboards(id) ON DELETE CASCADE,\n\t\t\tname TEXT NOT NULL,\n\t\t\tpanel_type TEXT NOT NULL DEFAULT 'table',\n\t\t\tquery TEXT NOT NULL,\n\t\t\tconnection_id TEXT REFERENCES connections(id) ON DELETE SET NULL,\n\t\t\tconfig TEXT DEFAULT '{}',\n\t\t\tlayout_x INTEGER DEFAULT 0,\n\t\t\tlayout_y INTEGER DEFAULT 0,\n\t\t\tlayout_w INTEGER DEFAULT 6,\n\t\t\tlayout_h INTEGER DEFAULT 4,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_panel_dashboard ON panels(dashboard_id)`,\n\n\t\t// Schedules (was in ClickHouse, now SQLite)\n\t\t`CREATE TABLE IF NOT EXISTS schedules (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tsaved_query_id TEXT REFERENCES saved_queries(id) ON DELETE 
CASCADE,\n\t\t\tconnection_id TEXT REFERENCES connections(id) ON DELETE SET NULL,\n\t\t\tcron TEXT NOT NULL,\n\t\t\ttimezone TEXT DEFAULT 'UTC',\n\t\t\tenabled INTEGER DEFAULT 1,\n\t\t\ttimeout_ms INTEGER DEFAULT 60000,\n\t\t\tlast_run_at TEXT,\n\t\t\tnext_run_at TEXT,\n\t\t\tlast_status TEXT,\n\t\t\tlast_error TEXT,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\n\t\t// Schedule runs\n\t\t`CREATE TABLE IF NOT EXISTS schedule_runs (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tschedule_id TEXT NOT NULL REFERENCES schedules(id) ON DELETE CASCADE,\n\t\t\tstarted_at TEXT NOT NULL,\n\t\t\tfinished_at TEXT,\n\t\t\tstatus TEXT NOT NULL,\n\t\t\trows_affected INTEGER DEFAULT 0,\n\t\t\telapsed_ms INTEGER DEFAULT 0,\n\t\t\terror TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_sched_run_schedule ON schedule_runs(schedule_id)`,\n\n\t\t// Audit logs (was stub, now real)\n\t\t`CREATE TABLE IF NOT EXISTS audit_logs (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\taction TEXT NOT NULL,\n\t\t\tusername TEXT,\n\t\t\tconnection_id TEXT,\n\t\t\tdetails TEXT,\n\t\t\tip_address TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_audit_created ON audit_logs(created_at)`,\n\n\t\t// Brain providers (admin-managed)\n\t\t`CREATE TABLE IF NOT EXISTS brain_providers (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tkind TEXT NOT NULL,\n\t\t\tbase_url TEXT,\n\t\t\tencrypted_api_key TEXT,\n\t\t\tis_active INTEGER DEFAULT 1,\n\t\t\tis_default INTEGER DEFAULT 0,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_provider_active ON brain_providers(is_active)`,\n\n\t\t// Brain models by provider\n\t\t`CREATE TABLE IF NOT EXISTS brain_models (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tprovider_id TEXT NOT 
NULL REFERENCES brain_providers(id) ON DELETE CASCADE,\n\t\t\tname TEXT NOT NULL,\n\t\t\tdisplay_name TEXT,\n\t\t\tis_active INTEGER DEFAULT 1,\n\t\t\tis_default INTEGER DEFAULT 0,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(provider_id, name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_model_provider ON brain_models(provider_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_model_active ON brain_models(is_active)`,\n\n\t\t// Brain chats\n\t\t`CREATE TABLE IF NOT EXISTS brain_chats (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tusername TEXT NOT NULL,\n\t\t\ttitle TEXT NOT NULL,\n\t\t\tprovider_id TEXT REFERENCES brain_providers(id) ON DELETE SET NULL,\n\t\t\tmodel_id TEXT REFERENCES brain_models(id) ON DELETE SET NULL,\n\t\t\tarchived INTEGER DEFAULT 0,\n\t\t\tlast_message_at TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_chat_user ON brain_chats(username, connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_chat_lastmsg ON brain_chats(last_message_at)`,\n\n\t\t// Brain messages\n\t\t`CREATE TABLE IF NOT EXISTS brain_messages (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tchat_id TEXT NOT NULL REFERENCES brain_chats(id) ON DELETE CASCADE,\n\t\t\trole TEXT NOT NULL,\n\t\t\tcontent TEXT NOT NULL,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'complete',\n\t\t\terror TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_msg_chat ON brain_messages(chat_id, created_at)`,\n\n\t\t// Brain artifacts\n\t\t`CREATE TABLE IF NOT EXISTS brain_artifacts (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tchat_id TEXT NOT NULL REFERENCES brain_chats(id) ON DELETE CASCADE,\n\t\t\tmessage_id TEXT REFERENCES brain_messages(id) ON DELETE SET 
NULL,\n\t\t\tartifact_type TEXT NOT NULL,\n\t\t\ttitle TEXT NOT NULL,\n\t\t\tcontent TEXT NOT NULL,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_artifact_chat ON brain_artifacts(chat_id, created_at)`,\n\n\t\t// Brain tool call traces\n\t\t`CREATE TABLE IF NOT EXISTS brain_tool_calls (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tchat_id TEXT NOT NULL REFERENCES brain_chats(id) ON DELETE CASCADE,\n\t\t\tmessage_id TEXT NOT NULL REFERENCES brain_messages(id) ON DELETE CASCADE,\n\t\t\ttool_name TEXT NOT NULL,\n\t\t\tinput_json TEXT NOT NULL,\n\t\t\toutput_json TEXT NOT NULL,\n\t\t\tstatus TEXT NOT NULL,\n\t\t\terror TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_tool_chat ON brain_tool_calls(chat_id, created_at)`,\n\n\t\t// Brain skills (admin-managed system prompts)\n\t\t`CREATE TABLE IF NOT EXISTS brain_skills (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tcontent TEXT NOT NULL,\n\t\t\tis_active INTEGER DEFAULT 1,\n\t\t\tis_default INTEGER DEFAULT 0,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_brain_skill_active ON brain_skills(is_active)`,\n\n\t\t// ══════════════════════════════════════════════════════════════\n\t\t// Governance tables (Pro feature)\n\t\t// ══════════════════════════════════════════════════════════════\n\n\t\t// Governance sync state (watermark tracking per connection)\n\t\t`CREATE TABLE IF NOT EXISTS gov_sync_state (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tsync_type TEXT NOT NULL,\n\t\t\tlast_synced_at TEXT,\n\t\t\twatermark TEXT,\n\t\t\tstatus TEXT DEFAULT 'idle',\n\t\t\tlast_error TEXT,\n\t\t\trow_count INTEGER DEFAULT 0,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT 
CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(connection_id, sync_type)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_sync_conn ON gov_sync_state(connection_id)`,\n\n\t\t// Governance databases\n\t\t`CREATE TABLE IF NOT EXISTS gov_databases (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tname TEXT NOT NULL,\n\t\t\tengine TEXT,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL,\n\t\t\tis_deleted INTEGER DEFAULT 0,\n\t\t\tUNIQUE(connection_id, name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_db_conn ON gov_databases(connection_id)`,\n\n\t\t// Governance tables\n\t\t`CREATE TABLE IF NOT EXISTS gov_tables (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tdatabase_name TEXT NOT NULL,\n\t\t\ttable_name TEXT NOT NULL,\n\t\t\tengine TEXT,\n\t\t\ttable_uuid TEXT,\n\t\t\ttotal_rows INTEGER DEFAULT 0,\n\t\t\ttotal_bytes INTEGER DEFAULT 0,\n\t\t\tpartition_count INTEGER DEFAULT 0,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL,\n\t\t\tis_deleted INTEGER DEFAULT 0,\n\t\t\tUNIQUE(connection_id, database_name, table_name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_tbl_conn ON gov_tables(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_tbl_db ON gov_tables(connection_id, database_name)`,\n\n\t\t// Governance columns\n\t\t`CREATE TABLE IF NOT EXISTS gov_columns (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tdatabase_name TEXT NOT NULL,\n\t\t\ttable_name TEXT NOT NULL,\n\t\t\tcolumn_name TEXT NOT NULL,\n\t\t\tcolumn_type TEXT NOT NULL,\n\t\t\tcolumn_position INTEGER DEFAULT 0,\n\t\t\tdefault_kind TEXT,\n\t\t\tdefault_expression TEXT,\n\t\t\tcomment TEXT,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL,\n\t\t\tis_deleted INTEGER DEFAULT 0,\n\t\t\tUNIQUE(connection_id, database_name, table_name, 
column_name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_col_conn ON gov_columns(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_col_tbl ON gov_columns(connection_id, database_name, table_name)`,\n\n\t\t// Governance schema changes\n\t\t`CREATE TABLE IF NOT EXISTS gov_schema_changes (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tchange_type TEXT NOT NULL,\n\t\t\tdatabase_name TEXT NOT NULL,\n\t\t\ttable_name TEXT,\n\t\t\tcolumn_name TEXT,\n\t\t\told_value TEXT,\n\t\t\tnew_value TEXT,\n\t\t\tdetected_at TEXT NOT NULL,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_schema_conn ON gov_schema_changes(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_schema_time ON gov_schema_changes(connection_id, detected_at)`,\n\n\t\t// Governance query log\n\t\t`CREATE TABLE IF NOT EXISTS gov_query_log (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tquery_id TEXT NOT NULL,\n\t\t\tch_user TEXT NOT NULL,\n\t\t\tquery_text TEXT NOT NULL,\n\t\t\tnormalized_hash TEXT,\n\t\t\tquery_kind TEXT,\n\t\t\tevent_time TEXT NOT NULL,\n\t\t\tduration_ms INTEGER DEFAULT 0,\n\t\t\tread_rows INTEGER DEFAULT 0,\n\t\t\tread_bytes INTEGER DEFAULT 0,\n\t\t\tresult_rows INTEGER DEFAULT 0,\n\t\t\twritten_rows INTEGER DEFAULT 0,\n\t\t\twritten_bytes INTEGER DEFAULT 0,\n\t\t\tmemory_usage INTEGER DEFAULT 0,\n\t\t\ttables_used TEXT,\n\t\t\tis_error INTEGER DEFAULT 0,\n\t\t\terror_message TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(connection_id, query_id)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_qlog_conn ON gov_query_log(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_qlog_time ON gov_query_log(connection_id, event_time)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_qlog_user ON gov_query_log(connection_id, ch_user)`,\n\t\t`CREATE INDEX IF NOT EXISTS 
idx_gov_qlog_hash ON gov_query_log(connection_id, normalized_hash)`,\n\n\t\t// Governance lineage edges\n\t\t`CREATE TABLE IF NOT EXISTS gov_lineage_edges (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tsource_database TEXT NOT NULL,\n\t\t\tsource_table TEXT NOT NULL,\n\t\t\ttarget_database TEXT NOT NULL,\n\t\t\ttarget_table TEXT NOT NULL,\n\t\t\tquery_id TEXT,\n\t\t\tch_user TEXT,\n\t\t\tedge_type TEXT NOT NULL,\n\t\t\tdetected_at TEXT NOT NULL,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_lineage_conn ON gov_lineage_edges(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_lineage_src ON gov_lineage_edges(connection_id, source_database, source_table)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_lineage_tgt ON gov_lineage_edges(connection_id, target_database, target_table)`,\n\n\t\t// Governance column-level lineage edges\n\t\t`CREATE TABLE IF NOT EXISTS gov_lineage_column_edges (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tlineage_edge_id TEXT NOT NULL REFERENCES gov_lineage_edges(id) ON DELETE CASCADE,\n\t\t\tconnection_id TEXT NOT NULL,\n\t\t\tsource_column TEXT NOT NULL,\n\t\t\ttarget_column TEXT NOT NULL,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(lineage_edge_id, source_column, target_column)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_col_lineage_edge ON gov_lineage_column_edges(lineage_edge_id)`,\n\n\t\t// Governance sensitivity tags\n\t\t`CREATE TABLE IF NOT EXISTS gov_tags (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tobject_type TEXT NOT NULL,\n\t\t\tdatabase_name TEXT NOT NULL,\n\t\t\ttable_name TEXT NOT NULL,\n\t\t\tcolumn_name TEXT NOT NULL DEFAULT '',\n\t\t\ttag TEXT NOT NULL,\n\t\t\ttagged_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(connection_id, object_type, database_name, table_name, column_name, 
tag)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_tag_conn ON gov_tags(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_tag_obj ON gov_tags(connection_id, database_name, table_name)`,\n\n\t\t// Governance ClickHouse users\n\t\t`CREATE TABLE IF NOT EXISTS gov_ch_users (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tname TEXT NOT NULL,\n\t\t\tauth_type TEXT,\n\t\t\thost_ip TEXT,\n\t\t\tdefault_roles TEXT,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL,\n\t\t\tUNIQUE(connection_id, name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_chuser_conn ON gov_ch_users(connection_id)`,\n\n\t\t// Governance ClickHouse roles\n\t\t`CREATE TABLE IF NOT EXISTS gov_ch_roles (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tname TEXT NOT NULL,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL,\n\t\t\tUNIQUE(connection_id, name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_chrole_conn ON gov_ch_roles(connection_id)`,\n\n\t\t// Governance role grants\n\t\t`CREATE TABLE IF NOT EXISTS gov_role_grants (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tuser_name TEXT NOT NULL,\n\t\t\tgranted_role_name TEXT NOT NULL,\n\t\t\tis_default INTEGER DEFAULT 0,\n\t\t\twith_admin_option INTEGER DEFAULT 0,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL,\n\t\t\tUNIQUE(connection_id, user_name, granted_role_name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_rolegrant_conn ON gov_role_grants(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_rolegrant_user ON gov_role_grants(connection_id, user_name)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_rolegrant_role ON gov_role_grants(connection_id, granted_role_name)`,\n\n\t\t// Governance grants\n\t\t`CREATE TABLE IF NOT EXISTS gov_grants (\n\t\t\tid TEXT PRIMARY 
KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tuser_name TEXT,\n\t\t\trole_name TEXT,\n\t\t\taccess_type TEXT NOT NULL,\n\t\t\tgrant_database TEXT,\n\t\t\tgrant_table TEXT,\n\t\t\tgrant_column TEXT,\n\t\t\tis_partial_revoke INTEGER DEFAULT 0,\n\t\t\tgrant_option INTEGER DEFAULT 0,\n\t\t\tfirst_seen TEXT NOT NULL,\n\t\t\tlast_updated TEXT NOT NULL\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_grant_conn ON gov_grants(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_grant_user ON gov_grants(connection_id, user_name)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_grant_role ON gov_grants(connection_id, role_name)`,\n\n\t\t// Governance access matrix (materialized)\n\t\t`CREATE TABLE IF NOT EXISTS gov_access_matrix (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tuser_name TEXT NOT NULL,\n\t\t\trole_name TEXT,\n\t\t\tdatabase_name TEXT,\n\t\t\ttable_name TEXT,\n\t\t\tprivilege TEXT NOT NULL,\n\t\t\tis_direct_grant INTEGER DEFAULT 0,\n\t\t\tlast_query_time TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_matrix_conn ON gov_access_matrix(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_matrix_user ON gov_access_matrix(connection_id, user_name)`,\n\n\t\t// Governance policies\n\t\t`CREATE TABLE IF NOT EXISTS gov_policies (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tname TEXT NOT NULL,\n\t\t\tdescription TEXT,\n\t\t\tobject_type TEXT NOT NULL,\n\t\t\tobject_database TEXT,\n\t\t\tobject_table TEXT,\n\t\t\tobject_column TEXT,\n\t\t\trequired_role TEXT,\n\t\t\tseverity TEXT DEFAULT 'warn',\n\t\t\tenforcement_mode TEXT NOT NULL DEFAULT 'warn',\n\t\t\tenabled INTEGER DEFAULT 1,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE 
INDEX IF NOT EXISTS idx_gov_policy_conn ON gov_policies(connection_id)`,\n\n\t\t// Governance policy violations\n\t\t`CREATE TABLE IF NOT EXISTS gov_policy_violations (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tpolicy_id TEXT NOT NULL REFERENCES gov_policies(id) ON DELETE CASCADE,\n\t\t\tquery_log_id TEXT,\n\t\t\tch_user TEXT NOT NULL,\n\t\t\tviolation_detail TEXT,\n\t\t\tseverity TEXT NOT NULL,\n\t\t\tdetection_phase TEXT NOT NULL DEFAULT 'post_exec',\n\t\t\trequest_endpoint TEXT,\n\t\t\tdetected_at TEXT NOT NULL,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_violation_conn ON gov_policy_violations(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_violation_policy ON gov_policy_violations(policy_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_violation_time ON gov_policy_violations(connection_id, detected_at)`,\n\n\t\t// Governance object notes/comments (table/column level)\n\t\t`CREATE TABLE IF NOT EXISTS gov_object_comments (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tobject_type TEXT NOT NULL,\n\t\t\tdatabase_name TEXT NOT NULL,\n\t\t\ttable_name TEXT NOT NULL,\n\t\t\tcolumn_name TEXT NOT NULL DEFAULT '',\n\t\t\tcomment_text TEXT NOT NULL,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_comment_obj ON gov_object_comments(connection_id, object_type, database_name, table_name, column_name, created_at)`,\n\n\t\t// Governance incidents (Collibra-style workflow, simplified)\n\t\t`CREATE TABLE IF NOT EXISTS gov_incidents (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tsource_type TEXT NOT NULL DEFAULT 'manual',\n\t\t\tsource_ref TEXT,\n\t\t\tdedupe_key TEXT,\n\t\t\ttitle 
TEXT NOT NULL,\n\t\t\tseverity TEXT NOT NULL DEFAULT 'warn',\n\t\t\tstatus TEXT NOT NULL DEFAULT 'open',\n\t\t\tassignee TEXT,\n\t\t\tdetails TEXT,\n\t\t\tresolution_note TEXT,\n\t\t\toccurrence_count INTEGER NOT NULL DEFAULT 1,\n\t\t\tfirst_seen_at TEXT NOT NULL,\n\t\t\tlast_seen_at TEXT NOT NULL,\n\t\t\tresolved_at TEXT,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_incident_conn_status ON gov_incidents(connection_id, status, severity, last_seen_at)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_incident_source ON gov_incidents(connection_id, source_type, source_ref)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_incident_dedupe ON gov_incidents(connection_id, dedupe_key, status)`,\n\n\t\t`CREATE TABLE IF NOT EXISTS gov_incident_comments (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tincident_id TEXT NOT NULL REFERENCES gov_incidents(id) ON DELETE CASCADE,\n\t\t\tcomment_text TEXT NOT NULL,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_gov_incident_comment_incident ON gov_incident_comments(incident_id, created_at)`,\n\n\t\t// Alerting channels (SMTP/Resend/Brevo)\n\t\t`CREATE TABLE IF NOT EXISTS alert_channels (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tchannel_type TEXT NOT NULL,\n\t\t\tconfig_encrypted TEXT NOT NULL,\n\t\t\tis_active INTEGER DEFAULT 1,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_channel_active ON alert_channels(is_active)`,\n\t\t`CREATE UNIQUE INDEX IF NOT EXISTS idx_alert_channel_name_unique ON alert_channels(name)`,\n\n\t\t// Alert rules\n\t\t`CREATE TABLE IF NOT EXISTS alert_rules (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tevent_type TEXT NOT NULL,\n\t\t\tseverity_min TEXT NOT NULL DEFAULT 
'warn',\n\t\t\tenabled INTEGER DEFAULT 1,\n\t\t\tcooldown_seconds INTEGER DEFAULT 300,\n\t\t\tmax_attempts INTEGER DEFAULT 5,\n\t\t\tsubject_template TEXT,\n\t\t\tbody_template TEXT,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_rule_enabled ON alert_rules(enabled)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_rule_event ON alert_rules(event_type, enabled)`,\n\n\t\t// Rule routes map rules to channels and recipients\n\t\t`CREATE TABLE IF NOT EXISTS alert_rule_routes (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\trule_id TEXT NOT NULL REFERENCES alert_rules(id) ON DELETE CASCADE,\n\t\t\tchannel_id TEXT NOT NULL REFERENCES alert_channels(id) ON DELETE CASCADE,\n\t\t\trecipients_json TEXT NOT NULL,\n\t\t\tis_active INTEGER DEFAULT 1,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_route_rule ON alert_rule_routes(rule_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_route_channel ON alert_rule_routes(channel_id)`,\n\n\t\t// Per-route delivery policy (digest/escalation metadata)\n\t\t`CREATE TABLE IF NOT EXISTS alert_route_policies (\n\t\t\troute_id TEXT PRIMARY KEY REFERENCES alert_rule_routes(id) ON DELETE CASCADE,\n\t\t\tdelivery_mode TEXT NOT NULL DEFAULT 'immediate',\n\t\t\tdigest_window_minutes INTEGER NOT NULL DEFAULT 0,\n\t\t\tescalation_channel_id TEXT REFERENCES alert_channels(id) ON DELETE SET NULL,\n\t\t\tescalation_recipients_json TEXT,\n\t\t\tescalation_after_failures INTEGER NOT NULL DEFAULT 0,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_route_policy_delivery ON alert_route_policies(delivery_mode, digest_window_minutes)`,\n\n\t\t// Alert events emitted by governance/scheduler/other subsystems\n\t\t`CREATE TABLE IF NOT EXISTS 
alert_events (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT,\n\t\t\tevent_type TEXT NOT NULL,\n\t\t\tseverity TEXT NOT NULL,\n\t\t\ttitle TEXT NOT NULL,\n\t\t\tmessage TEXT NOT NULL,\n\t\t\tpayload_json TEXT,\n\t\t\tfingerprint TEXT,\n\t\t\tsource_ref TEXT,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'new',\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tprocessed_at TEXT\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_event_status ON alert_events(status, created_at)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_event_type ON alert_events(event_type, created_at)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_event_fingerprint ON alert_events(fingerprint, created_at)`,\n\n\t\t// Dispatch jobs generated from events and routes\n\t\t`CREATE TABLE IF NOT EXISTS alert_dispatch_jobs (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tevent_id TEXT NOT NULL REFERENCES alert_events(id) ON DELETE CASCADE,\n\t\t\trule_id TEXT NOT NULL REFERENCES alert_rules(id) ON DELETE CASCADE,\n\t\t\troute_id TEXT NOT NULL REFERENCES alert_rule_routes(id) ON DELETE CASCADE,\n\t\t\tchannel_id TEXT NOT NULL REFERENCES alert_channels(id) ON DELETE CASCADE,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'queued',\n\t\t\tattempt_count INTEGER DEFAULT 0,\n\t\t\tmax_attempts INTEGER DEFAULT 5,\n\t\t\tnext_attempt_at TEXT NOT NULL,\n\t\t\tlast_error TEXT,\n\t\t\tprovider_message_id TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tsent_at TEXT\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_job_due ON alert_dispatch_jobs(status, next_attempt_at)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_job_event ON alert_dispatch_jobs(event_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_job_route ON alert_dispatch_jobs(route_id)`,\n\n\t\t// Digest windows for routes configured in digest mode\n\t\t`CREATE TABLE IF NOT EXISTS alert_route_digests (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\troute_id TEXT NOT NULL REFERENCES alert_rule_routes(id) ON DELETE 
CASCADE,\n\t\t\trule_id TEXT NOT NULL REFERENCES alert_rules(id) ON DELETE CASCADE,\n\t\t\tchannel_id TEXT NOT NULL REFERENCES alert_channels(id) ON DELETE CASCADE,\n\t\t\tbucket_start TEXT NOT NULL,\n\t\t\tbucket_end TEXT NOT NULL,\n\t\t\tevent_type TEXT NOT NULL,\n\t\t\tseverity TEXT NOT NULL,\n\t\t\tevent_count INTEGER NOT NULL DEFAULT 0,\n\t\t\tevent_ids_json TEXT NOT NULL,\n\t\t\ttitles_json TEXT NOT NULL,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'collecting',\n\t\t\tattempt_count INTEGER NOT NULL DEFAULT 0,\n\t\t\tmax_attempts INTEGER NOT NULL DEFAULT 5,\n\t\t\tnext_attempt_at TEXT NOT NULL,\n\t\t\tlast_error TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tsent_at TEXT,\n\t\t\tUNIQUE(route_id, bucket_start, event_type, severity)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_digest_due ON alert_route_digests(status, next_attempt_at, bucket_end)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_alert_digest_route ON alert_route_digests(route_id, bucket_start)`,\n\n\t\t// ══════════════════════════════════════════════════════════════\n\t\t// Pipeline tables (data ingestion pipelines)\n\t\t// ══════════════════════════════════════════════════════════════\n\n\t\t// Pipeline definitions\n\t\t`CREATE TABLE IF NOT EXISTS pipelines (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tdescription TEXT,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'draft',\n\t\t\tconfig TEXT NOT NULL DEFAULT '{}',\n\t\t\tcreated_by TEXT,\n\t\t\tlast_started_at TEXT,\n\t\t\tlast_stopped_at TEXT,\n\t\t\tlast_error TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_conn ON pipelines(connection_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_status ON pipelines(status)`,\n\n\t\t// Pipeline graph nodes (sources and sinks)\n\t\t`CREATE TABLE IF 
NOT EXISTS pipeline_nodes (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tpipeline_id TEXT NOT NULL REFERENCES pipelines(id) ON DELETE CASCADE,\n\t\t\tnode_type TEXT NOT NULL,\n\t\t\tlabel TEXT NOT NULL,\n\t\t\tposition_x REAL NOT NULL DEFAULT 0,\n\t\t\tposition_y REAL NOT NULL DEFAULT 0,\n\t\t\tconfig_encrypted TEXT NOT NULL DEFAULT '{}',\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_node_pipeline ON pipeline_nodes(pipeline_id)`,\n\n\t\t// Pipeline graph edges (connections between nodes)\n\t\t`CREATE TABLE IF NOT EXISTS pipeline_edges (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tpipeline_id TEXT NOT NULL REFERENCES pipelines(id) ON DELETE CASCADE,\n\t\t\tsource_node_id TEXT NOT NULL REFERENCES pipeline_nodes(id) ON DELETE CASCADE,\n\t\t\ttarget_node_id TEXT NOT NULL REFERENCES pipeline_nodes(id) ON DELETE CASCADE,\n\t\t\tsource_handle TEXT,\n\t\t\ttarget_handle TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(pipeline_id, source_node_id, target_node_id)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_edge_pipeline ON pipeline_edges(pipeline_id)`,\n\n\t\t// Pipeline execution runs\n\t\t`CREATE TABLE IF NOT EXISTS pipeline_runs (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tpipeline_id TEXT NOT NULL REFERENCES pipelines(id) ON DELETE CASCADE,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'running',\n\t\t\tstarted_at TEXT NOT NULL,\n\t\t\tfinished_at TEXT,\n\t\t\trows_ingested INTEGER DEFAULT 0,\n\t\t\tbytes_ingested INTEGER DEFAULT 0,\n\t\t\terrors_count INTEGER DEFAULT 0,\n\t\t\tlast_error TEXT,\n\t\t\tmetrics_json TEXT DEFAULT '{}',\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_run_pipeline ON pipeline_runs(pipeline_id)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_run_started ON pipeline_runs(pipeline_id, started_at)`,\n\n\t\t// Pipeline run logs\n\t\t`CREATE TABLE IF NOT EXISTS pipeline_run_logs (\n\t\t\tid 
TEXT PRIMARY KEY,\n\t\t\trun_id TEXT NOT NULL REFERENCES pipeline_runs(id) ON DELETE CASCADE,\n\t\t\tlevel TEXT NOT NULL DEFAULT 'info',\n\t\t\tmessage TEXT NOT NULL,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_pipeline_run_log_run ON pipeline_run_logs(run_id, created_at)`,\n\n\t\t// ── Models (dbt-like SQL transformations) ─────────────────────────\n\t\t`CREATE TABLE IF NOT EXISTS models (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tname TEXT NOT NULL,\n\t\t\tdescription TEXT DEFAULT '',\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\ttarget_database TEXT NOT NULL DEFAULT 'default',\n\t\t\tmaterialization TEXT NOT NULL DEFAULT 'view',\n\t\t\tsql_body TEXT NOT NULL DEFAULT '',\n\t\t\ttable_engine TEXT NOT NULL DEFAULT 'MergeTree',\n\t\t\torder_by TEXT NOT NULL DEFAULT 'tuple()',\n\t\t\tstatus TEXT NOT NULL DEFAULT 'draft',\n\t\t\tlast_error TEXT,\n\t\t\tlast_run_at TEXT,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(connection_id, name)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_model_conn ON models(connection_id)`,\n\n\t\t`CREATE TABLE IF NOT EXISTS model_runs (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'running',\n\t\t\ttotal_models INTEGER NOT NULL DEFAULT 0,\n\t\t\tsucceeded INTEGER NOT NULL DEFAULT 0,\n\t\t\tfailed INTEGER NOT NULL DEFAULT 0,\n\t\t\tskipped INTEGER NOT NULL DEFAULT 0,\n\t\t\tstarted_at TEXT NOT NULL,\n\t\t\tfinished_at TEXT,\n\t\t\ttriggered_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_model_run_conn ON model_runs(connection_id, started_at)`,\n\n\t\t`CREATE TABLE IF NOT EXISTS model_run_results (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\trun_id TEXT NOT NULL REFERENCES model_runs(id) ON DELETE CASCADE,\n\t\t\tmodel_id 
TEXT NOT NULL REFERENCES models(id) ON DELETE CASCADE,\n\t\t\tmodel_name TEXT NOT NULL,\n\t\t\tstatus TEXT NOT NULL DEFAULT 'pending',\n\t\t\tresolved_sql TEXT,\n\t\t\telapsed_ms INTEGER DEFAULT 0,\n\t\t\terror TEXT,\n\t\t\tstarted_at TEXT,\n\t\t\tfinished_at TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_model_result_run ON model_run_results(run_id)`,\n\n\t\t`CREATE TABLE IF NOT EXISTS model_schedules (\n\t\t\tid TEXT PRIMARY KEY,\n\t\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\t\tanchor_model_id TEXT REFERENCES models(id) ON DELETE CASCADE,\n\t\t\tcron TEXT NOT NULL,\n\t\t\tenabled INTEGER NOT NULL DEFAULT 1,\n\t\t\tlast_run_at TEXT,\n\t\t\tnext_run_at TEXT,\n\t\t\tlast_status TEXT,\n\t\t\tlast_error TEXT,\n\t\t\tcreated_by TEXT,\n\t\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\t\tUNIQUE(connection_id, anchor_model_id)\n\t\t)`,\n\t\t`CREATE INDEX IF NOT EXISTS idx_model_sched_conn ON model_schedules(connection_id)`,\n\t}\n\n\tfor _, stmt := range stmts {\n\t\tif _, err := db.conn.Exec(stmt); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// Migrate model_schedules: add anchor_model_id column if missing\n\tif err := db.migrateModelSchedulesAnchor(); err != nil {\n\t\treturn fmt.Errorf(\"migrate model_schedules anchor: %w\", err)\n\t}\n\n\tif err := db.ensureColumn(\"gov_policies\", \"enforcement_mode\", \"TEXT NOT NULL DEFAULT 'warn'\"); err != nil {\n\t\treturn err\n\t}\n\tif err := db.ensureColumn(\"gov_policy_violations\", \"detection_phase\", \"TEXT NOT NULL DEFAULT 'post_exec'\"); err != nil {\n\t\treturn err\n\t}\n\tif err := db.ensureColumn(\"gov_policy_violations\", \"request_endpoint\", \"TEXT\"); err != nil {\n\t\treturn err\n\t}\n\tif err := db.ensureColumn(\"brain_chats\", \"context_database\", \"TEXT\"); err != nil {\n\t\treturn err\n\t}\n\tif err := db.ensureColumn(\"brain_chats\", \"context_table\", 
\"TEXT\"); err != nil {\n\t\treturn err\n\t}\n\tif err := db.ensureColumn(\"brain_chats\", \"context_tables\", \"TEXT\"); err != nil {\n\t\treturn err\n\t}\n\n\t// Drop legacy tables from the old SaaS schema\n\tdropLegacy := []string{\n\t\t\"DROP TABLE IF EXISTS organizations\",\n\t\t\"DROP TABLE IF EXISTS tunnel_connections\",\n\t\t\"DROP TABLE IF EXISTS cloud_sessions\",\n\t\t\"DROP TABLE IF EXISTS scheduled_runs\",\n\t\t\"DROP TABLE IF EXISTS scheduled_jobs\",\n\t\t\"DROP TABLE IF EXISTS cloud_saved_queries\",\n\t\t\"DROP TABLE IF EXISTS cloud_panels\",\n\t\t\"DROP TABLE IF EXISTS cloud_dashboards\",\n\t\t\"DROP TABLE IF EXISTS cloud_user_roles\",\n\t\t\"DROP TABLE IF EXISTS beta_applications\",\n\t\t\"DROP TABLE IF EXISTS cloud_audit_logs\",\n\t}\n\tfor _, stmt := range dropLegacy {\n\t\tif _, err := db.conn.Exec(stmt); err != nil {\n\t\t\tslog.Warn(\"Failed to drop legacy table\", \"error\", err)\n\t\t}\n\t}\n\n\t// Seed installation_id if not present\n\tvar count int\n\tif err := db.conn.QueryRow(\"SELECT COUNT(*) FROM settings WHERE key = 'installation_id'\").Scan(&count); err == nil && count == 0 {\n\t\tdb.conn.Exec(\"INSERT INTO settings (key, value) VALUES ('installation_id', ?)\", uuid.NewString())\n\t\tslog.Info(\"Generated new installation ID\")\n\t}\n\n\t// Seed default Brain skill if not present.\n\tif err := db.conn.QueryRow(\"SELECT COUNT(*) FROM brain_skills\").Scan(&count); err == nil && count == 0 {\n\t\tnow := \"CURRENT_TIMESTAMP\"\n\t\tdb.conn.Exec(`INSERT INTO brain_skills (id, name, content, is_active, is_default, created_by, created_at, updated_at)\n\t\t\tVALUES (?, ?, ?, 1, 1, 'system', `+now+`, `+now+`)`,\n\t\t\tuuid.NewString(),\n\t\t\t\"Default Brain Skill\",\n\t\t\t`You are Brain, a senior ClickHouse copilot.\n\nPriorities:\n- Give correct SQL first, concise explanation second.\n- Keep queries safe and cost-aware: start with LIMIT 100 unless user asks otherwise.\n- Prefer explicit columns over SELECT * on large tables.\n- Use only 
schema fields known in context; if missing, ask a short clarifying question.\n- When uncertain, provide assumptions clearly.\n\nArtifacts:\n- When sharing SQL, return a runnable SQL block.\n- If a query result artifact exists, reference it by title and summarize key findings in bullets.\n- For follow-ups, reuse prior artifacts/chats when relevant.\n\nTool behavior:\n- Read-only queries by default.\n- Never execute DDL/DROP/TRUNCATE/ALTER unless user explicitly asks and confirms.\n- For expensive requests, propose a lightweight preview query first.\n\nFormatting:\n1) One-line intent acknowledgment.\n2) SQL in a fenced sql block.\n3) Short explanation and optional next-step variants.`,\n\t\t)\n\t}\n\n\tslog.Info(\"Database migrations completed\")\n\treturn nil\n}\n\n// migrateModelSchedulesAnchor detects old model_schedules without anchor_model_id\n// and migrates data to the new schema.\nfunc (db *DB) migrateModelSchedulesAnchor() error {\n\t// Check if anchor_model_id column already exists\n\trows, err := db.conn.Query(\"PRAGMA table_info(model_schedules)\")\n\tif err != nil {\n\t\treturn nil // table may not exist yet\n\t}\n\tdefer rows.Close()\n\n\thasAnchor := false\n\tfor rows.Next() {\n\t\tvar cid int\n\t\tvar name, colType string\n\t\tvar notNull, pk int\n\t\tvar dfltValue interface{}\n\t\tif err := rows.Scan(&cid, &name, &colType, &notNull, &dfltValue, &pk); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif strings.EqualFold(strings.TrimSpace(name), \"anchor_model_id\") {\n\t\t\thasAnchor = true\n\t\t}\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn err\n\t}\n\n\tif hasAnchor {\n\t\treturn nil // already migrated\n\t}\n\n\tslog.Info(\"Migrating model_schedules to add anchor_model_id\")\n\n\t// Rename old table\n\tif _, err := db.conn.Exec(\"ALTER TABLE model_schedules RENAME TO model_schedules_old\"); err != nil {\n\t\treturn fmt.Errorf(\"rename old table: %w\", err)\n\t}\n\n\t// Create new table\n\tif _, err := db.conn.Exec(`CREATE TABLE model_schedules 
(\n\t\tid TEXT PRIMARY KEY,\n\t\tconnection_id TEXT NOT NULL REFERENCES connections(id) ON DELETE CASCADE,\n\t\tanchor_model_id TEXT REFERENCES models(id) ON DELETE CASCADE,\n\t\tcron TEXT NOT NULL,\n\t\tenabled INTEGER NOT NULL DEFAULT 1,\n\t\tlast_run_at TEXT,\n\t\tnext_run_at TEXT,\n\t\tlast_status TEXT,\n\t\tlast_error TEXT,\n\t\tcreated_by TEXT,\n\t\tcreated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\tupdated_at TEXT DEFAULT CURRENT_TIMESTAMP,\n\t\tUNIQUE(connection_id, anchor_model_id)\n\t)`); err != nil {\n\t\treturn fmt.Errorf(\"create new table: %w\", err)\n\t}\n\n\t// Copy data with backfill: pick the first model by name as anchor\n\tif _, err := db.conn.Exec(`INSERT INTO model_schedules\n\t\t(id, connection_id, anchor_model_id, cron, enabled, last_run_at, next_run_at,\n\t\t last_status, last_error, created_by, created_at, updated_at)\n\t\tSELECT s.id, s.connection_id,\n\t\t\t(SELECT m.id FROM models m WHERE m.connection_id = s.connection_id ORDER BY m.name ASC LIMIT 1),\n\t\t\ts.cron, s.enabled, s.last_run_at, s.next_run_at,\n\t\t\ts.last_status, s.last_error, s.created_by, s.created_at, s.updated_at\n\t\tFROM model_schedules_old s`); err != nil {\n\t\treturn fmt.Errorf(\"copy data: %w\", err)\n\t}\n\n\t// Drop old table\n\tif _, err := db.conn.Exec(\"DROP TABLE model_schedules_old\"); err != nil {\n\t\treturn fmt.Errorf(\"drop old table: %w\", err)\n\t}\n\n\t// Delete orphaned schedules with no anchor\n\tif _, err := db.conn.Exec(\"DELETE FROM model_schedules WHERE anchor_model_id IS NULL\"); err != nil {\n\t\treturn fmt.Errorf(\"delete orphans: %w\", err)\n\t}\n\n\t// Recreate index\n\tif _, err := db.conn.Exec(\"CREATE INDEX IF NOT EXISTS idx_model_sched_conn ON model_schedules(connection_id)\"); err != nil {\n\t\treturn fmt.Errorf(\"recreate index: %w\", err)\n\t}\n\n\tslog.Info(\"model_schedules migration complete\")\n\treturn nil\n}\n\nfunc (db *DB) ensureColumn(tableName, columnName, definition string) error {\n\trows, err := 
db.conn.Query(fmt.Sprintf(\"PRAGMA table_info(%s)\", tableName))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"inspect table %s columns: %w\", tableName, err)\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tvar cid int\n\t\tvar name, colType string\n\t\tvar notNull, pk int\n\t\tvar dfltValue interface{}\n\t\tif err := rows.Scan(&cid, &name, &colType, &notNull, &dfltValue, &pk); err != nil {\n\t\t\treturn fmt.Errorf(\"scan table info for %s: %w\", tableName, err)\n\t\t}\n\t\tif strings.EqualFold(strings.TrimSpace(name), strings.TrimSpace(columnName)) {\n\t\t\treturn nil\n\t\t}\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn fmt.Errorf(\"iterate table info for %s: %w\", tableName, err)\n\t}\n\n\tif _, err := db.conn.Exec(fmt.Sprintf(\"ALTER TABLE %s ADD COLUMN %s %s\", tableName, columnName, definition)); err != nil {\n\t\treturn fmt.Errorf(\"add column %s.%s: %w\", tableName, columnName, err)\n\t}\n\n\treturn nil\n}\n"
  },
  {
    "path": "internal/database/migrations_guardrails_test.go",
    "content": "package database\n\nimport \"testing\"\n\nfunc TestGuardrailColumnsExistAfterMigrations(t *testing.T) {\n\tdb := openTestDB(t)\n\n\tmustHaveColumn(t, db, \"gov_policies\", \"enforcement_mode\")\n\tmustHaveColumn(t, db, \"gov_policy_violations\", \"detection_phase\")\n\tmustHaveColumn(t, db, \"gov_policy_violations\", \"request_endpoint\")\n}\n\nfunc mustHaveColumn(t *testing.T, db *DB, tableName, columnName string) {\n\tt.Helper()\n\n\trows, err := db.conn.Query(\"PRAGMA table_info(\" + tableName + \")\")\n\tif err != nil {\n\t\tt.Fatalf(\"inspect table %s: %v\", tableName, err)\n\t}\n\tdefer rows.Close()\n\n\tfound := false\n\tfor rows.Next() {\n\t\tvar cid int\n\t\tvar name, typ string\n\t\tvar notNull, pk int\n\t\tvar defaultValue interface{}\n\t\tif err := rows.Scan(&cid, &name, &typ, &notNull, &defaultValue, &pk); err != nil {\n\t\t\tt.Fatalf(\"scan pragma for %s: %v\", tableName, err)\n\t\t}\n\t\tif name == columnName {\n\t\t\tfound = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif err := rows.Err(); err != nil {\n\t\tt.Fatalf(\"iterate pragma for %s: %v\", tableName, err)\n\t}\n\tif !found {\n\t\tt.Fatalf(\"expected column %s on table %s\", columnName, tableName)\n\t}\n}\n"
  },
  {
    "path": "internal/database/models.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Model represents a SQL model definition (dbt-like).\ntype Model struct {\n\tID              string  `json:\"id\"`\n\tName            string  `json:\"name\"`\n\tDescription     string  `json:\"description\"`\n\tConnectionID    string  `json:\"connection_id\"`\n\tTargetDatabase  string  `json:\"target_database\"`\n\tMaterialization string  `json:\"materialization\"`\n\tSQLBody         string  `json:\"sql_body\"`\n\tTableEngine     string  `json:\"table_engine\"`\n\tOrderBy         string  `json:\"order_by\"`\n\tStatus          string  `json:\"status\"`\n\tLastError       *string `json:\"last_error\"`\n\tLastRunAt       *string `json:\"last_run_at\"`\n\tCreatedBy       *string `json:\"created_by\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\n// ModelRun represents a batch execution of models.\ntype ModelRun struct {\n\tID          string  `json:\"id\"`\n\tConnID      string  `json:\"connection_id\"`\n\tStatus      string  `json:\"status\"`\n\tTotal       int     `json:\"total_models\"`\n\tSucceeded   int     `json:\"succeeded\"`\n\tFailed      int     `json:\"failed\"`\n\tSkipped     int     `json:\"skipped\"`\n\tStartedAt   string  `json:\"started_at\"`\n\tFinishedAt  *string `json:\"finished_at\"`\n\tTriggeredBy *string `json:\"triggered_by\"`\n\tCreatedAt   string  `json:\"created_at\"`\n}\n\n// ModelRunResult represents per-model results within a run.\ntype ModelRunResult struct {\n\tID          string  `json:\"id\"`\n\tRunID       string  `json:\"run_id\"`\n\tModelID     string  `json:\"model_id\"`\n\tModelName   string  `json:\"model_name\"`\n\tStatus      string  `json:\"status\"`\n\tResolvedSQL *string `json:\"resolved_sql\"`\n\tElapsedMs   int64   `json:\"elapsed_ms\"`\n\tError       *string `json:\"error\"`\n\tStartedAt   *string `json:\"started_at\"`\n\tFinishedAt  
*string `json:\"finished_at\"`\n\tCreatedAt   string  `json:\"created_at\"`\n}\n\n// ── Model CRUD ──────────────────────────────────────────────────────\n\n// GetModelsByConnection returns all models for a connection.\nfunc (db *DB) GetModelsByConnection(connectionID string) ([]Model, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, description, connection_id, target_database, materialization,\n\t\t        sql_body, table_engine, order_by, status, last_error, last_run_at,\n\t\t        created_by, created_at, updated_at\n\t\t FROM models WHERE connection_id = ? ORDER BY name ASC`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get models: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar models []Model\n\tfor rows.Next() {\n\t\tm, err := scanModel(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tmodels = append(models, m)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate model rows: %w\", err)\n\t}\n\treturn models, nil\n}\n\n// GetModelByID returns a single model by ID.\nfunc (db *DB) GetModelByID(id string) (*Model, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, description, connection_id, target_database, materialization,\n\t\t        sql_body, table_engine, order_by, status, last_error, last_run_at,\n\t\t        created_by, created_at, updated_at\n\t\t FROM models WHERE id = ?`, id,\n\t)\n\tm, err := scanModelRow(row)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model by id: %w\", err)\n\t}\n\treturn m, nil\n}\n\n// GetModelByName returns a model by connection and name.\nfunc (db *DB) GetModelByName(connectionID, name string) (*Model, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, description, connection_id, target_database, materialization,\n\t\t        sql_body, table_engine, order_by, status, last_error, last_run_at,\n\t\t        created_by, created_at, updated_at\n\t\t FROM models WHERE 
connection_id = ? AND name = ?`, connectionID, name,\n\t)\n\tm, err := scanModelRow(row)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model by name: %w\", err)\n\t}\n\treturn m, nil\n}\n\n// CreateModel creates a new model.\nfunc (db *DB) CreateModel(connectionID, name, description, targetDB, materialization, sqlBody, tableEngine, orderBy, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar creator interface{}\n\tif createdBy != \"\" {\n\t\tcreator = createdBy\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO models (id, name, description, connection_id, target_database, materialization,\n\t\t                     sql_body, table_engine, order_by, status, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 'draft', ?, ?, ?)`,\n\t\tid, name, description, connectionID, targetDB, materialization,\n\t\tsqlBody, tableEngine, orderBy, creator, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create model: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateModel updates an existing model.\nfunc (db *DB) UpdateModel(id, name, description, targetDB, materialization, sqlBody, tableEngine, orderBy string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE models SET name = ?, description = ?, target_database = ?, materialization = ?,\n\t\t        sql_body = ?, table_engine = ?, order_by = ?, updated_at = ?\n\t\t WHERE id = ?`,\n\t\tname, description, targetDB, materialization, sqlBody, tableEngine, orderBy, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update model: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteModel removes a model by ID.\nfunc (db *DB) DeleteModel(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM models WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete model: %w\", err)\n\t}\n\treturn nil\n}\n\n// 
UpdateModelStatus updates a model's status and last error.\nfunc (db *DB) UpdateModelStatus(id, status, lastError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar errVal interface{}\n\tif lastError != \"\" {\n\t\terrVal = lastError\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t\"UPDATE models SET status = ?, last_error = ?, last_run_at = ?, updated_at = ? WHERE id = ?\",\n\t\tstatus, errVal, now, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update model status: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── Model Runs ──────────────────────────────────────────────────────\n\n// CreateModelRun creates a new run record.\nfunc (db *DB) CreateModelRun(connectionID string, totalModels int, triggeredBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar trigger interface{}\n\tif triggeredBy != \"\" {\n\t\ttrigger = triggeredBy\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO model_runs (id, connection_id, status, total_models, started_at, triggered_by, created_at)\n\t\t VALUES (?, ?, 'running', ?, ?, ?, ?)`,\n\t\tid, connectionID, totalModels, now, trigger, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create model run: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// FinalizeModelRun marks a run as complete.\nfunc (db *DB) FinalizeModelRun(id, status string, succeeded, failed, skipped int) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE model_runs SET status = ?, succeeded = ?, failed = ?, skipped = ?, finished_at = ? 
WHERE id = ?`,\n\t\tstatus, succeeded, failed, skipped, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"finalize model run: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetModelRuns returns recent runs for a connection.\nfunc (db *DB) GetModelRuns(connectionID string, limit, offset int) ([]ModelRun, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, connection_id, status, total_models, succeeded, failed, skipped,\n\t\t        started_at, finished_at, triggered_by, created_at\n\t\t FROM model_runs WHERE connection_id = ? ORDER BY started_at DESC LIMIT ? OFFSET ?`,\n\t\tconnectionID, limit, offset,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model runs: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar runs []ModelRun\n\tfor rows.Next() {\n\t\tvar r ModelRun\n\t\tvar finished, trigger sql.NullString\n\t\tif err := rows.Scan(&r.ID, &r.ConnID, &r.Status, &r.Total, &r.Succeeded,\n\t\t\t&r.Failed, &r.Skipped, &r.StartedAt, &finished, &trigger, &r.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan model run: %w\", err)\n\t\t}\n\t\tr.FinishedAt = nullStringToPtr(finished)\n\t\tr.TriggeredBy = nullStringToPtr(trigger)\n\t\truns = append(runs, r)\n\t}\n\treturn runs, rows.Err()\n}\n\n// GetModelRunByID returns a single run.\nfunc (db *DB) GetModelRunByID(id string) (*ModelRun, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, connection_id, status, total_models, succeeded, failed, skipped,\n\t\t        started_at, finished_at, triggered_by, created_at\n\t\t FROM model_runs WHERE id = ?`, id,\n\t)\n\n\tvar r ModelRun\n\tvar finished, trigger sql.NullString\n\terr := row.Scan(&r.ID, &r.ConnID, &r.Status, &r.Total, &r.Succeeded,\n\t\t&r.Failed, &r.Skipped, &r.StartedAt, &finished, &trigger, &r.CreatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model run: %w\", err)\n\t}\n\tr.FinishedAt = nullStringToPtr(finished)\n\tr.TriggeredBy = nullStringToPtr(trigger)\n\treturn &r, 
nil\n}\n\n// ── Model Run Results ───────────────────────────────────────────────\n\n// CreateModelRunResult creates a pending result record for a model in a run.\nfunc (db *DB) CreateModelRunResult(runID, modelID, modelName string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO model_run_results (id, run_id, model_id, model_name, status, created_at)\n\t\t VALUES (?, ?, ?, ?, 'pending', ?)`,\n\t\tid, runID, modelID, modelName, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create model run result: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateModelRunResult updates the result for a specific model in a run.\nfunc (db *DB) UpdateModelRunResult(runID, modelID, status, resolvedSQL string, elapsedMs int64, errMsg string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar sqlVal, errVal interface{}\n\tif resolvedSQL != \"\" {\n\t\tsqlVal = resolvedSQL\n\t}\n\tif errMsg != \"\" {\n\t\terrVal = errMsg\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE model_run_results SET status = ?, resolved_sql = ?, elapsed_ms = ?, error = ?,\n\t\t        started_at = COALESCE(started_at, ?), finished_at = ?\n\t\t WHERE run_id = ? AND model_id = ?`,\n\t\tstatus, sqlVal, elapsedMs, errVal, now, now, runID, modelID,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update model run result: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetModelRunResults returns all results for a run.\nfunc (db *DB) GetModelRunResults(runID string) ([]ModelRunResult, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, run_id, model_id, model_name, status, resolved_sql, elapsed_ms,\n\t\t        error, started_at, finished_at, created_at\n\t\t FROM model_run_results WHERE run_id = ? 
ORDER BY created_at ASC`, runID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model run results: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []ModelRunResult\n\tfor rows.Next() {\n\t\tvar r ModelRunResult\n\t\tvar resolvedSQL, errStr, started, finished sql.NullString\n\t\tif err := rows.Scan(&r.ID, &r.RunID, &r.ModelID, &r.ModelName, &r.Status,\n\t\t\t&resolvedSQL, &r.ElapsedMs, &errStr, &started, &finished, &r.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan model run result: %w\", err)\n\t\t}\n\t\tr.ResolvedSQL = nullStringToPtr(resolvedSQL)\n\t\tr.Error = nullStringToPtr(errStr)\n\t\tr.StartedAt = nullStringToPtr(started)\n\t\tr.FinishedAt = nullStringToPtr(finished)\n\t\tresults = append(results, r)\n\t}\n\treturn results, rows.Err()\n}\n\n// HasRunningModelRun checks if there's an active run for a connection.\nfunc (db *DB) HasRunningModelRun(connectionID string) (bool, error) {\n\tvar count int\n\terr := db.conn.QueryRow(\n\t\t\"SELECT COUNT(*) FROM model_runs WHERE connection_id = ? 
AND status = 'running'\",\n\t\tconnectionID,\n\t).Scan(&count)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"check running model run: %w\", err)\n\t}\n\treturn count > 0, nil\n}\n\n// ── Helpers ─────────────────────────────────────────────────────────\n\nfunc scanModel(rows *sql.Rows) (Model, error) {\n\tvar m Model\n\tvar lastErr, lastRun, createdBy sql.NullString\n\tif err := rows.Scan(&m.ID, &m.Name, &m.Description, &m.ConnectionID,\n\t\t&m.TargetDatabase, &m.Materialization, &m.SQLBody, &m.TableEngine,\n\t\t&m.OrderBy, &m.Status, &lastErr, &lastRun, &createdBy,\n\t\t&m.CreatedAt, &m.UpdatedAt); err != nil {\n\t\treturn m, fmt.Errorf(\"scan model: %w\", err)\n\t}\n\tm.LastError = nullStringToPtr(lastErr)\n\tm.LastRunAt = nullStringToPtr(lastRun)\n\tm.CreatedBy = nullStringToPtr(createdBy)\n\treturn m, nil\n}\n\nfunc scanModelRow(row *sql.Row) (*Model, error) {\n\tvar m Model\n\tvar lastErr, lastRun, createdBy sql.NullString\n\terr := row.Scan(&m.ID, &m.Name, &m.Description, &m.ConnectionID,\n\t\t&m.TargetDatabase, &m.Materialization, &m.SQLBody, &m.TableEngine,\n\t\t&m.OrderBy, &m.Status, &lastErr, &lastRun, &createdBy,\n\t\t&m.CreatedAt, &m.UpdatedAt)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tm.LastError = nullStringToPtr(lastErr)\n\tm.LastRunAt = nullStringToPtr(lastRun)\n\tm.CreatedBy = nullStringToPtr(createdBy)\n\treturn &m, nil\n}\n\n// ── Model Schedules ─────────────────────────────────────────────────\n\n// ModelSchedule represents a cron schedule for running a model pipeline.\ntype ModelSchedule struct {\n\tID            string  `json:\"id\"`\n\tConnectionID  string  `json:\"connection_id\"`\n\tAnchorModelID *string `json:\"anchor_model_id\"`\n\tCron          string  `json:\"cron\"`\n\tEnabled       bool    `json:\"enabled\"`\n\tLastRunAt     *string `json:\"last_run_at\"`\n\tNextRunAt     *string `json:\"next_run_at\"`\n\tLastStatus    *string `json:\"last_status\"`\n\tLastError     *string `json:\"last_error\"`\n\tCreatedBy     *string 
`json:\"created_by\"`\n\tCreatedAt     string  `json:\"created_at\"`\n\tUpdatedAt     string  `json:\"updated_at\"`\n}\n\n// GetModelSchedulesByConnection returns all schedules for a connection.\nfunc (db *DB) GetModelSchedulesByConnection(connectionID string) ([]ModelSchedule, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, connection_id, anchor_model_id, cron, enabled, last_run_at, next_run_at,\n\t\t        last_status, last_error, created_by, created_at, updated_at\n\t\t FROM model_schedules WHERE connection_id = ?`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model schedules: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar schedules []ModelSchedule\n\tfor rows.Next() {\n\t\ts, err := scanModelSchedule(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tschedules = append(schedules, s)\n\t}\n\treturn schedules, rows.Err()\n}\n\n// GetModelScheduleByAnchor returns the schedule for a specific pipeline anchor, or nil.\nfunc (db *DB) GetModelScheduleByAnchor(connectionID, anchorModelID string) (*ModelSchedule, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, connection_id, anchor_model_id, cron, enabled, last_run_at, next_run_at,\n\t\t        last_status, last_error, created_by, created_at, updated_at\n\t\t FROM model_schedules WHERE connection_id = ? 
AND anchor_model_id = ?`,\n\t\tconnectionID, anchorModelID,\n\t)\n\n\tvar s ModelSchedule\n\tvar enabled int\n\tvar anchor, lastRun, nextRun, lastStatus, lastErr, createdBy sql.NullString\n\terr := row.Scan(&s.ID, &s.ConnectionID, &anchor, &s.Cron, &enabled,\n\t\t&lastRun, &nextRun, &lastStatus, &lastErr, &createdBy,\n\t\t&s.CreatedAt, &s.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get model schedule by anchor: %w\", err)\n\t}\n\ts.Enabled = enabled == 1\n\ts.AnchorModelID = nullStringToPtr(anchor)\n\ts.LastRunAt = nullStringToPtr(lastRun)\n\ts.NextRunAt = nullStringToPtr(nextRun)\n\ts.LastStatus = nullStringToPtr(lastStatus)\n\ts.LastError = nullStringToPtr(lastErr)\n\ts.CreatedBy = nullStringToPtr(createdBy)\n\treturn &s, nil\n}\n\n// UpsertModelSchedule creates or replaces a model schedule for a pipeline anchor.\nfunc (db *DB) UpsertModelSchedule(connectionID, anchorModelID, cron, nextRunAt, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar creator interface{}\n\tif createdBy != \"\" {\n\t\tcreator = createdBy\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO model_schedules\n\t\t (id, connection_id, anchor_model_id, cron, enabled, next_run_at, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, 1, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, anchor_model_id)\n\t\t DO UPDATE SET cron = excluded.cron, enabled = 1, next_run_at = excluded.next_run_at,\n\t\t               created_by = excluded.created_by, updated_at = excluded.updated_at`,\n\t\tid, connectionID, anchorModelID, cron, nextRunAt, creator, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"upsert model schedule: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateModelScheduleStatusByID updates a schedule after a run attempt, by schedule ID.\nfunc (db *DB) UpdateModelScheduleStatusByID(scheduleID, status, lastError string, nextRunAt *string) 
error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar errVal interface{}\n\tif lastError != \"\" {\n\t\terrVal = lastError\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE model_schedules SET last_run_at = ?, last_status = ?, last_error = ?,\n\t\t        next_run_at = ?, updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnow, status, errVal, nextRunAt, now, scheduleID,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update model schedule status: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteModelScheduleByAnchor removes the schedule for a specific pipeline anchor.\nfunc (db *DB) DeleteModelScheduleByAnchor(connectionID, anchorModelID string) error {\n\t_, err := db.conn.Exec(\n\t\t\"DELETE FROM model_schedules WHERE connection_id = ? AND anchor_model_id = ?\",\n\t\tconnectionID, anchorModelID,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete model schedule: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetEnabledModelSchedules returns all enabled model schedules.\nfunc (db *DB) GetEnabledModelSchedules() ([]ModelSchedule, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, connection_id, anchor_model_id, cron, enabled, last_run_at, next_run_at,\n\t\t        last_status, last_error, created_by, created_at, updated_at\n\t\t FROM model_schedules WHERE enabled = 1`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get enabled model schedules: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar schedules []ModelSchedule\n\tfor rows.Next() {\n\t\ts, err := scanModelSchedule(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tschedules = append(schedules, s)\n\t}\n\treturn schedules, rows.Err()\n}\n\nfunc scanModelSchedule(rows *sql.Rows) (ModelSchedule, error) {\n\tvar s ModelSchedule\n\tvar enabled int\n\tvar anchor, lastRun, nextRun, lastStatus, lastErr, createdBy sql.NullString\n\tif err := rows.Scan(&s.ID, &s.ConnectionID, &anchor, &s.Cron, &enabled,\n\t\t&lastRun, &nextRun, &lastStatus, &lastErr, &createdBy,\n\t\t&s.CreatedAt, &s.UpdatedAt); err != nil 
{\n\t\treturn s, fmt.Errorf(\"scan model schedule: %w\", err)\n\t}\n\ts.Enabled = enabled == 1\n\ts.AnchorModelID = nullStringToPtr(anchor)\n\ts.LastRunAt = nullStringToPtr(lastRun)\n\ts.NextRunAt = nullStringToPtr(nextRun)\n\ts.LastStatus = nullStringToPtr(lastStatus)\n\ts.LastError = nullStringToPtr(lastErr)\n\ts.CreatedBy = nullStringToPtr(createdBy)\n\treturn s, nil\n}\n"
  },
  {
    "path": "internal/database/pipelines.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Pipeline represents a data ingestion pipeline.\ntype Pipeline struct {\n\tID            string  `json:\"id\"`\n\tName          string  `json:\"name\"`\n\tDescription   *string `json:\"description\"`\n\tConnectionID  string  `json:\"connection_id\"`\n\tStatus        string  `json:\"status\"`\n\tConfig        string  `json:\"config\"`\n\tCreatedBy     *string `json:\"created_by\"`\n\tLastStartedAt *string `json:\"last_started_at\"`\n\tLastStoppedAt *string `json:\"last_stopped_at\"`\n\tLastError     *string `json:\"last_error\"`\n\tCreatedAt     string  `json:\"created_at\"`\n\tUpdatedAt     string  `json:\"updated_at\"`\n}\n\n// PipelineNode represents a node in a pipeline graph.\ntype PipelineNode struct {\n\tID              string  `json:\"id\"`\n\tPipelineID      string  `json:\"pipeline_id\"`\n\tNodeType        string  `json:\"node_type\"`\n\tLabel           string  `json:\"label\"`\n\tPositionX       float64 `json:\"position_x\"`\n\tPositionY       float64 `json:\"position_y\"`\n\tConfigEncrypted string  `json:\"config_encrypted\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\n// PipelineEdge represents a connection between two nodes.\ntype PipelineEdge struct {\n\tID           string  `json:\"id\"`\n\tPipelineID   string  `json:\"pipeline_id\"`\n\tSourceNodeID string  `json:\"source_node_id\"`\n\tTargetNodeID string  `json:\"target_node_id\"`\n\tSourceHandle *string `json:\"source_handle\"`\n\tTargetHandle *string `json:\"target_handle\"`\n\tCreatedAt    string  `json:\"created_at\"`\n}\n\n// PipelineRun represents an execution run of a pipeline.\ntype PipelineRun struct {\n\tID           string  `json:\"id\"`\n\tPipelineID   string  `json:\"pipeline_id\"`\n\tStatus       string  `json:\"status\"`\n\tStartedAt    string  `json:\"started_at\"`\n\tFinishedAt   *string 
`json:\"finished_at\"`\n\tRowsIngested int64   `json:\"rows_ingested\"`\n\tBytesIngested int64  `json:\"bytes_ingested\"`\n\tErrorsCount  int64   `json:\"errors_count\"`\n\tLastError    *string `json:\"last_error\"`\n\tMetricsJSON  string  `json:\"metrics_json\"`\n\tCreatedAt    string  `json:\"created_at\"`\n}\n\n// PipelineRunLog represents a log entry for a pipeline run.\ntype PipelineRunLog struct {\n\tID        string `json:\"id\"`\n\tRunID     string `json:\"run_id\"`\n\tLevel     string `json:\"level\"`\n\tMessage   string `json:\"message\"`\n\tCreatedAt string `json:\"created_at\"`\n}\n\n// ── Pipeline CRUD ──────────────────────────────────────────────────\n\n// GetPipelines retrieves all pipelines.\nfunc (db *DB) GetPipelines() ([]Pipeline, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, description, connection_id, status, config,\n\t\t        created_by, last_started_at, last_stopped_at, last_error,\n\t\t        created_at, updated_at\n\t\t FROM pipelines ORDER BY updated_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get pipelines: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar pipelines []Pipeline\n\tfor rows.Next() {\n\t\tp, err := scanPipeline(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpipelines = append(pipelines, p)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate pipeline rows: %w\", err)\n\t}\n\treturn pipelines, nil\n}\n\n// GetPipelineByID retrieves a pipeline by ID.\nfunc (db *DB) GetPipelineByID(id string) (*Pipeline, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, description, connection_id, status, config,\n\t\t        created_by, last_started_at, last_stopped_at, last_error,\n\t\t        created_at, updated_at\n\t\t FROM pipelines WHERE id = ?`, id,\n\t)\n\n\tvar p Pipeline\n\tvar desc, createdBy, lastStarted, lastStopped, lastErr sql.NullString\n\terr := row.Scan(&p.ID, &p.Name, &desc, &p.ConnectionID, &p.Status, &p.Config,\n\t\t&createdBy, 
&lastStarted, &lastStopped, &lastErr,\n\t\t&p.CreatedAt, &p.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get pipeline by id: %w\", err)\n\t}\n\tp.Description = nullStringToPtr(desc)\n\tp.CreatedBy = nullStringToPtr(createdBy)\n\tp.LastStartedAt = nullStringToPtr(lastStarted)\n\tp.LastStoppedAt = nullStringToPtr(lastStopped)\n\tp.LastError = nullStringToPtr(lastErr)\n\treturn &p, nil\n}\n\n// GetPipelinesByStatus retrieves pipelines with a given status.\nfunc (db *DB) GetPipelinesByStatus(status string) ([]Pipeline, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, description, connection_id, status, config,\n\t\t        created_by, last_started_at, last_stopped_at, last_error,\n\t\t        created_at, updated_at\n\t\t FROM pipelines WHERE status = ? ORDER BY updated_at DESC`, status,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get pipelines by status: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar pipelines []Pipeline\n\tfor rows.Next() {\n\t\tp, err := scanPipeline(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpipelines = append(pipelines, p)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate pipeline rows: %w\", err)\n\t}\n\treturn pipelines, nil\n}\n\n// CreatePipeline creates a new pipeline and returns its ID.\nfunc (db *DB) CreatePipeline(name, description, connectionID, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc, creator interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\tif createdBy != \"\" {\n\t\tcreator = createdBy\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO pipelines (id, name, description, connection_id, status, config, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, 'draft', '{}', ?, ?, ?)`,\n\t\tid, name, desc, connectionID, creator, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", 
fmt.Errorf(\"create pipeline: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdatePipeline updates a pipeline's name and description.\nfunc (db *DB) UpdatePipeline(id, name, description string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t\"UPDATE pipelines SET name = ?, description = ?, updated_at = ? WHERE id = ?\",\n\t\tname, desc, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update pipeline: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpdatePipelineStatus updates a pipeline's status and optional error/timestamp fields.\nfunc (db *DB) UpdatePipelineStatus(id, status, lastError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar errVal interface{}\n\tif lastError != \"\" {\n\t\terrVal = lastError\n\t}\n\n\tvar startedAt, stoppedAt interface{}\n\tswitch status {\n\tcase \"running\", \"starting\":\n\t\tstartedAt = now\n\tcase \"stopped\", \"error\":\n\t\tstoppedAt = now\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE pipelines SET status = ?, last_error = ?,\n\t\t last_started_at = COALESCE(?, last_started_at),\n\t\t last_stopped_at = COALESCE(?, last_stopped_at),\n\t\t updated_at = ? 
WHERE id = ?`,\n\t\tstatus, errVal, startedAt, stoppedAt, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update pipeline status: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeletePipeline deletes a pipeline and all related data (cascade).\nfunc (db *DB) DeletePipeline(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM pipelines WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete pipeline: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── Pipeline Graph ─────────────────────────────────────────────────\n\n// SavePipelineGraph atomically replaces all nodes and edges for a pipeline.\nfunc (db *DB) SavePipelineGraph(pipelineID string, nodes []PipelineNode, edges []PipelineEdge, viewportConfig string) error {\n\ttx, err := db.conn.Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin graph transaction: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t// Delete existing nodes and edges (edges cascade from nodes)\n\tif _, err := tx.Exec(\"DELETE FROM pipeline_edges WHERE pipeline_id = ?\", pipelineID); err != nil {\n\t\treturn fmt.Errorf(\"delete old edges: %w\", err)\n\t}\n\tif _, err := tx.Exec(\"DELETE FROM pipeline_nodes WHERE pipeline_id = ?\", pipelineID); err != nil {\n\t\treturn fmt.Errorf(\"delete old nodes: %w\", err)\n\t}\n\n\t// Insert nodes\n\tfor _, n := range nodes {\n\t\tnodeID := n.ID\n\t\tif nodeID == \"\" {\n\t\t\tnodeID = uuid.NewString()\n\t\t}\n\t\t_, err := tx.Exec(\n\t\t\t`INSERT INTO pipeline_nodes (id, pipeline_id, node_type, label, position_x, position_y, config_encrypted, created_at, updated_at)\n\t\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\t\tnodeID, pipelineID, n.NodeType, n.Label, n.PositionX, n.PositionY, n.ConfigEncrypted, now, now,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"insert node %s: %w\", nodeID, err)\n\t\t}\n\t}\n\n\t// Insert edges\n\tfor _, e := range edges {\n\t\tedgeID := e.ID\n\t\tif edgeID == \"\" {\n\t\t\tedgeID = 
uuid.NewString()\n\t\t}\n\t\tvar srcHandle, tgtHandle interface{}\n\t\tif e.SourceHandle != nil {\n\t\t\tsrcHandle = *e.SourceHandle\n\t\t}\n\t\tif e.TargetHandle != nil {\n\t\t\ttgtHandle = *e.TargetHandle\n\t\t}\n\t\t_, err := tx.Exec(\n\t\t\t`INSERT INTO pipeline_edges (id, pipeline_id, source_node_id, target_node_id, source_handle, target_handle, created_at)\n\t\t\t VALUES (?, ?, ?, ?, ?, ?, ?)`,\n\t\t\tedgeID, pipelineID, e.SourceNodeID, e.TargetNodeID, srcHandle, tgtHandle, now,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"insert edge %s: %w\", edgeID, err)\n\t\t}\n\t}\n\n\t// Update pipeline config (viewport) and updated_at\n\tif viewportConfig != \"\" {\n\t\tif _, err := tx.Exec(\"UPDATE pipelines SET config = ?, updated_at = ? WHERE id = ?\", viewportConfig, now, pipelineID); err != nil {\n\t\t\treturn fmt.Errorf(\"update pipeline config: %w\", err)\n\t\t}\n\t} else {\n\t\tif _, err := tx.Exec(\"UPDATE pipelines SET updated_at = ? WHERE id = ?\", now, pipelineID); err != nil {\n\t\t\treturn fmt.Errorf(\"update pipeline updated_at: %w\", err)\n\t\t}\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit graph transaction: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetPipelineGraph retrieves all nodes and edges for a pipeline.\nfunc (db *DB) GetPipelineGraph(pipelineID string) ([]PipelineNode, []PipelineEdge, error) {\n\t// Nodes\n\tnodeRows, err := db.conn.Query(\n\t\t`SELECT id, pipeline_id, node_type, label, position_x, position_y, config_encrypted, created_at, updated_at\n\t\t FROM pipeline_nodes WHERE pipeline_id = ? 
ORDER BY created_at ASC`, pipelineID,\n\t)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"get pipeline nodes: %w\", err)\n\t}\n\tdefer nodeRows.Close()\n\n\tvar nodes []PipelineNode\n\tfor nodeRows.Next() {\n\t\tvar n PipelineNode\n\t\tif err := nodeRows.Scan(&n.ID, &n.PipelineID, &n.NodeType, &n.Label,\n\t\t\t&n.PositionX, &n.PositionY, &n.ConfigEncrypted,\n\t\t\t&n.CreatedAt, &n.UpdatedAt); err != nil {\n\t\t\treturn nil, nil, fmt.Errorf(\"scan pipeline node: %w\", err)\n\t\t}\n\t\tnodes = append(nodes, n)\n\t}\n\tif err := nodeRows.Err(); err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"iterate node rows: %w\", err)\n\t}\n\n\t// Edges\n\tedgeRows, err := db.conn.Query(\n\t\t`SELECT id, pipeline_id, source_node_id, target_node_id, source_handle, target_handle, created_at\n\t\t FROM pipeline_edges WHERE pipeline_id = ? ORDER BY created_at ASC`, pipelineID,\n\t)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"get pipeline edges: %w\", err)\n\t}\n\tdefer edgeRows.Close()\n\n\tvar edges []PipelineEdge\n\tfor edgeRows.Next() {\n\t\tvar e PipelineEdge\n\t\tvar srcHandle, tgtHandle sql.NullString\n\t\tif err := edgeRows.Scan(&e.ID, &e.PipelineID, &e.SourceNodeID, &e.TargetNodeID,\n\t\t\t&srcHandle, &tgtHandle, &e.CreatedAt); err != nil {\n\t\t\treturn nil, nil, fmt.Errorf(\"scan pipeline edge: %w\", err)\n\t\t}\n\t\te.SourceHandle = nullStringToPtr(srcHandle)\n\t\te.TargetHandle = nullStringToPtr(tgtHandle)\n\t\tedges = append(edges, e)\n\t}\n\tif err := edgeRows.Err(); err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"iterate edge rows: %w\", err)\n\t}\n\n\treturn nodes, edges, nil\n}\n\n// ── Pipeline Runs ──────────────────────────────────────────────────\n\n// CreatePipelineRun creates a new run record and returns its ID.\nfunc (db *DB) CreatePipelineRun(pipelineID, status string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO pipeline_runs (id, pipeline_id, status, 
started_at, created_at)\n\t\t VALUES (?, ?, ?, ?, ?)`,\n\t\tid, pipelineID, status, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create pipeline run: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdatePipelineRun updates a run's status and metrics.\nfunc (db *DB) UpdatePipelineRun(id, status string, rowsIngested, bytesIngested, errorsCount int64, lastError, metricsJSON string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar errVal interface{}\n\tif lastError != \"\" {\n\t\terrVal = lastError\n\t}\n\tvar finished interface{}\n\tif status != \"running\" {\n\t\tfinished = now\n\t}\n\tif metricsJSON == \"\" {\n\t\tmetricsJSON = \"{}\"\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE pipeline_runs SET status = ?, finished_at = ?, rows_ingested = ?, bytes_ingested = ?,\n\t\t errors_count = ?, last_error = ?, metrics_json = ? WHERE id = ?`,\n\t\tstatus, finished, rowsIngested, bytesIngested, errorsCount, errVal, metricsJSON, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update pipeline run: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetPipelineRuns retrieves runs for a pipeline with limit/offset.\nfunc (db *DB) GetPipelineRuns(pipelineID string, limit, offset int) ([]PipelineRun, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, pipeline_id, status, started_at, finished_at, rows_ingested, bytes_ingested,\n\t\t        errors_count, last_error, metrics_json, created_at\n\t\t FROM pipeline_runs WHERE pipeline_id = ? ORDER BY started_at DESC LIMIT ? 
OFFSET ?`,\n\t\tpipelineID, limit, offset,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get pipeline runs: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar runs []PipelineRun\n\tfor rows.Next() {\n\t\tvar r PipelineRun\n\t\tvar finished, lastErr sql.NullString\n\t\tif err := rows.Scan(&r.ID, &r.PipelineID, &r.Status, &r.StartedAt, &finished,\n\t\t\t&r.RowsIngested, &r.BytesIngested, &r.ErrorsCount, &lastErr,\n\t\t\t&r.MetricsJSON, &r.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan pipeline run: %w\", err)\n\t\t}\n\t\tr.FinishedAt = nullStringToPtr(finished)\n\t\tr.LastError = nullStringToPtr(lastErr)\n\t\truns = append(runs, r)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate run rows: %w\", err)\n\t}\n\treturn runs, nil\n}\n\n// ── Pipeline Run Logs ──────────────────────────────────────────────\n\n// CreatePipelineRunLog creates a log entry for a pipeline run.\nfunc (db *DB) CreatePipelineRunLog(runID, level, message string) error {\n\tid := uuid.NewString()\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO pipeline_run_logs (id, run_id, level, message, created_at)\n\t\t VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP)`,\n\t\tid, runID, level, message,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create pipeline run log: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetPipelineRunLogs retrieves logs for a pipeline run.\nfunc (db *DB) GetPipelineRunLogs(runID string, limit int) ([]PipelineRunLog, error) {\n\tif limit <= 0 {\n\t\tlimit = 200\n\t}\n\n\trows, err := db.conn.Query(\n\t\t`SELECT id, run_id, level, message, created_at\n\t\t FROM pipeline_run_logs WHERE run_id = ? 
ORDER BY created_at DESC LIMIT ?`,\n\t\trunID, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get pipeline run logs: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar logs []PipelineRunLog\n\tfor rows.Next() {\n\t\tvar l PipelineRunLog\n\t\tif err := rows.Scan(&l.ID, &l.RunID, &l.Level, &l.Message, &l.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan pipeline run log: %w\", err)\n\t\t}\n\t\tlogs = append(logs, l)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate log rows: %w\", err)\n\t}\n\treturn logs, nil\n}\n\n// ── Helpers ────────────────────────────────────────────────────────\n\n// scanPipeline scans a pipeline row from a *sql.Rows.\nfunc scanPipeline(rows *sql.Rows) (Pipeline, error) {\n\tvar p Pipeline\n\tvar desc, createdBy, lastStarted, lastStopped, lastErr sql.NullString\n\tif err := rows.Scan(&p.ID, &p.Name, &desc, &p.ConnectionID, &p.Status, &p.Config,\n\t\t&createdBy, &lastStarted, &lastStopped, &lastErr,\n\t\t&p.CreatedAt, &p.UpdatedAt); err != nil {\n\t\treturn p, fmt.Errorf(\"scan pipeline: %w\", err)\n\t}\n\tp.Description = nullStringToPtr(desc)\n\tp.CreatedBy = nullStringToPtr(createdBy)\n\tp.LastStartedAt = nullStringToPtr(lastStarted)\n\tp.LastStoppedAt = nullStringToPtr(lastStopped)\n\tp.LastError = nullStringToPtr(lastErr)\n\treturn p, nil\n}\n"
  },
  {
    "path": "internal/database/rate_limits.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n)\n\n// RateLimitEntry represents a rate limit record.\ntype RateLimitEntry struct {\n\tIdentifier     string  `json:\"identifier\"`\n\tType           string  `json:\"type\"`\n\tAttempts       int     `json:\"attempts\"`\n\tFirstAttemptAt string  `json:\"first_attempt_at\"`\n\tLockedUntil    *string `json:\"locked_until\"`\n\tCreatedAt      string  `json:\"created_at\"`\n\tUpdatedAt      string  `json:\"updated_at\"`\n}\n\n// GetRateLimit retrieves a rate limit entry by identifier.\nfunc (db *DB) GetRateLimit(identifier string) (*RateLimitEntry, error) {\n\trow := db.conn.QueryRow(\n\t\t\"SELECT identifier, type, attempts, first_attempt_at, locked_until, created_at, updated_at FROM rate_limits WHERE identifier = ?\",\n\t\tidentifier,\n\t)\n\n\tvar r RateLimitEntry\n\tvar lockedUntil sql.NullString\n\n\terr := row.Scan(\n\t\t&r.Identifier, &r.Type, &r.Attempts,\n\t\t&r.FirstAttemptAt, &lockedUntil,\n\t\t&r.CreatedAt, &r.UpdatedAt,\n\t)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get rate limit: %w\", err)\n\t}\n\n\tr.LockedUntil = nullStringToPtr(lockedUntil)\n\treturn &r, nil\n}\n\n// UpsertRateLimit inserts or updates a rate limit entry.\nfunc (db *DB) UpsertRateLimit(identifier, limitType string, attempts int, firstAttempt time.Time, lockedUntil *time.Time) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tfirstAttemptStr := firstAttempt.UTC().Format(time.RFC3339)\n\n\tvar lockedUntilVal interface{}\n\tif lockedUntil != nil {\n\t\tlockedUntilVal = lockedUntil.UTC().Format(time.RFC3339)\n\t}\n\n\t_, err := db.conn.Exec(`\n\t\tINSERT INTO rate_limits (identifier, type, attempts, first_attempt_at, locked_until, created_at, updated_at)\n\t\tVALUES (?, ?, ?, ?, ?, ?, ?)\n\t\tON CONFLICT(identifier) DO UPDATE SET\n\t\t\ttype = excluded.type,\n\t\t\tattempts = excluded.attempts,\n\t\t\tfirst_attempt_at = 
excluded.first_attempt_at,\n\t\t\tlocked_until = excluded.locked_until,\n\t\t\tupdated_at = excluded.updated_at\n\t`, identifier, limitType, attempts, firstAttemptStr, lockedUntilVal, now, now)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert rate limit: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteRateLimit deletes a rate limit entry by identifier.\nfunc (db *DB) DeleteRateLimit(identifier string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM rate_limits WHERE identifier = ?\", identifier)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete rate limit: %w\", err)\n\t}\n\treturn nil\n}\n\n// CleanupExpiredRateLimits removes rate limit entries that have expired based on the window.\n// Returns the number of entries deleted.\nfunc (db *DB) CleanupExpiredRateLimits(windowMs int64) (int64, error) {\n\tcutoff := time.Now().UTC().Add(-time.Duration(windowMs) * time.Millisecond).Format(time.RFC3339)\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tresult, err := db.conn.Exec(`\n\t\tDELETE FROM rate_limits\n\t\tWHERE (first_attempt_at < ? AND (locked_until IS NULL OR locked_until < ?))\n\t\t   OR (locked_until IS NOT NULL AND locked_until < ?)\n\t`, cutoff, now, now)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"cleanup expired rate limits: %w\", err)\n\t}\n\n\tcount, err := result.RowsAffected()\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"get rows affected: %w\", err)\n\t}\n\treturn count, nil\n}\n"
  },
  {
    "path": "internal/database/saved_queries.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// SavedQuery represents a saved SQL query.\ntype SavedQuery struct {\n\tID           string  `json:\"id\"`\n\tName         string  `json:\"name\"`\n\tDescription  *string `json:\"description\"`\n\tQuery        string  `json:\"query\"`\n\tConnectionID *string `json:\"connection_id\"`\n\tCreatedBy    *string `json:\"created_by\"`\n\tCreatedAt    string  `json:\"created_at\"`\n\tUpdatedAt    string  `json:\"updated_at\"`\n}\n\n// CreateSavedQueryParams holds parameters for creating a saved query.\ntype CreateSavedQueryParams struct {\n\tName         string\n\tDescription  string\n\tQuery        string\n\tConnectionID string\n\tCreatedBy    string\n}\n\n// GetSavedQueries retrieves all saved queries.\nfunc (db *DB) GetSavedQueries() ([]SavedQuery, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, description, query, connection_id, created_by, created_at, updated_at\n\t\t FROM saved_queries ORDER BY updated_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get saved queries: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar queries []SavedQuery\n\tfor rows.Next() {\n\t\tvar q SavedQuery\n\t\tvar desc, connID, createdBy sql.NullString\n\t\tif err := rows.Scan(&q.ID, &q.Name, &desc, &q.Query, &connID, &createdBy, &q.CreatedAt, &q.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan saved query: %w\", err)\n\t\t}\n\t\tq.Description = nullStringToPtr(desc)\n\t\tq.ConnectionID = nullStringToPtr(connID)\n\t\tq.CreatedBy = nullStringToPtr(createdBy)\n\t\tqueries = append(queries, q)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate saved query rows: %w\", err)\n\t}\n\treturn queries, nil\n}\n\n// GetSavedQueryByID retrieves a saved query by ID.\nfunc (db *DB) GetSavedQueryByID(id string) (*SavedQuery, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, description, query, 
connection_id, created_by, created_at, updated_at\n\t\t FROM saved_queries WHERE id = ?`, id,\n\t)\n\n\tvar q SavedQuery\n\tvar desc, connID, createdBy sql.NullString\n\terr := row.Scan(&q.ID, &q.Name, &desc, &q.Query, &connID, &createdBy, &q.CreatedAt, &q.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get saved query by id: %w\", err)\n\t}\n\tq.Description = nullStringToPtr(desc)\n\tq.ConnectionID = nullStringToPtr(connID)\n\tq.CreatedBy = nullStringToPtr(createdBy)\n\treturn &q, nil\n}\n\n// CreateSavedQuery creates a new saved query and returns its ID.\nfunc (db *DB) CreateSavedQuery(params CreateSavedQueryParams) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc, connID, createdBy interface{}\n\tif params.Description != \"\" {\n\t\tdesc = params.Description\n\t}\n\tif params.ConnectionID != \"\" {\n\t\tconnID = params.ConnectionID\n\t}\n\tif params.CreatedBy != \"\" {\n\t\tcreatedBy = params.CreatedBy\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO saved_queries (id, name, description, query, connection_id, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, params.Name, desc, params.Query, connID, createdBy, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create saved query: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateSavedQuery updates an existing saved query.\nfunc (db *DB) UpdateSavedQuery(id, name, description, query, connectionID string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc, connID interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\tif connectionID != \"\" {\n\t\tconnID = connectionID\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE saved_queries SET name = ?, description = ?, query = ?, connection_id = ?, updated_at = ? 
WHERE id = ?`,\n\t\tname, desc, query, connID, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update saved query: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteSavedQuery deletes a saved query by ID.\nfunc (db *DB) DeleteSavedQuery(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM saved_queries WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete saved query: %w\", err)\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "internal/database/schedules.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Schedule represents a scheduled query.\ntype Schedule struct {\n\tID           string  `json:\"id\"`\n\tName         string  `json:\"name\"`\n\tSavedQueryID string  `json:\"saved_query_id\"`\n\tConnectionID *string `json:\"connection_id\"`\n\tCron         string  `json:\"cron\"`\n\tTimezone     string  `json:\"timezone\"`\n\tEnabled      bool    `json:\"enabled\"`\n\tTimeoutMs    int     `json:\"timeout_ms\"`\n\tLastRunAt    *string `json:\"last_run_at\"`\n\tNextRunAt    *string `json:\"next_run_at\"`\n\tLastStatus   *string `json:\"last_status\"`\n\tLastError    *string `json:\"last_error\"`\n\tCreatedBy    *string `json:\"created_by\"`\n\tCreatedAt    string  `json:\"created_at\"`\n\tUpdatedAt    string  `json:\"updated_at\"`\n}\n\n// ScheduleRun represents a single execution of a scheduled query.\ntype ScheduleRun struct {\n\tID           string  `json:\"id\"`\n\tScheduleID   string  `json:\"schedule_id\"`\n\tStartedAt    string  `json:\"started_at\"`\n\tFinishedAt   *string `json:\"finished_at\"`\n\tStatus       string  `json:\"status\"`\n\tRowsAffected int     `json:\"rows_affected\"`\n\tElapsedMs    int     `json:\"elapsed_ms\"`\n\tError        *string `json:\"error\"`\n\tCreatedAt    string  `json:\"created_at\"`\n}\n\n// GetSchedules retrieves all schedules.\nfunc (db *DB) GetSchedules() ([]Schedule, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, saved_query_id, connection_id, cron, timezone, enabled, timeout_ms,\n\t\t        last_run_at, next_run_at, last_status, last_error, created_by, created_at, updated_at\n\t\t FROM schedules ORDER BY created_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get schedules: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar schedules []Schedule\n\tfor rows.Next() {\n\t\ts, err := scanSchedule(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tschedules = 
append(schedules, s)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate schedule rows: %w\", err)\n\t}\n\treturn schedules, nil\n}\n\n// GetEnabledSchedules retrieves all enabled schedules.\nfunc (db *DB) GetEnabledSchedules() ([]Schedule, error) {\n\trows, err := db.conn.Query(\n\t\t`SELECT id, name, saved_query_id, connection_id, cron, timezone, enabled, timeout_ms,\n\t\t        last_run_at, next_run_at, last_status, last_error, created_by, created_at, updated_at\n\t\t FROM schedules WHERE enabled = 1 ORDER BY created_at DESC`,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get enabled schedules: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar schedules []Schedule\n\tfor rows.Next() {\n\t\ts, err := scanSchedule(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tschedules = append(schedules, s)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate enabled schedule rows: %w\", err)\n\t}\n\treturn schedules, nil\n}\n\n// GetScheduleByID retrieves a schedule by ID.\nfunc (db *DB) GetScheduleByID(id string) (*Schedule, error) {\n\trow := db.conn.QueryRow(\n\t\t`SELECT id, name, saved_query_id, connection_id, cron, timezone, enabled, timeout_ms,\n\t\t        last_run_at, next_run_at, last_status, last_error, created_by, created_at, updated_at\n\t\t FROM schedules WHERE id = ?`, id,\n\t)\n\n\tvar s Schedule\n\tvar connID, lastRun, nextRun, lastStatus, lastError, createdBy sql.NullString\n\tvar enabled int\n\n\terr := row.Scan(&s.ID, &s.Name, &s.SavedQueryID, &connID, &s.Cron, &s.Timezone, &enabled, &s.TimeoutMs,\n\t\t&lastRun, &nextRun, &lastStatus, &lastError, &createdBy, &s.CreatedAt, &s.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get schedule by id: %w\", err)\n\t}\n\n\ts.Enabled = enabled == 1\n\ts.ConnectionID = nullStringToPtr(connID)\n\ts.LastRunAt = nullStringToPtr(lastRun)\n\ts.NextRunAt = 
nullStringToPtr(nextRun)\n\ts.LastStatus = nullStringToPtr(lastStatus)\n\ts.LastError = nullStringToPtr(lastError)\n\ts.CreatedBy = nullStringToPtr(createdBy)\n\treturn &s, nil\n}\n\n// CreateSchedule creates a new schedule and returns its ID.\nfunc (db *DB) CreateSchedule(name, savedQueryID, connectionID, cron, timezone, createdBy string, timeoutMs int) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar connID, creator interface{}\n\tif connectionID != \"\" {\n\t\tconnID = connectionID\n\t}\n\tif createdBy != \"\" {\n\t\tcreator = createdBy\n\t}\n\tif timezone == \"\" {\n\t\ttimezone = \"UTC\"\n\t}\n\tif timeoutMs <= 0 {\n\t\ttimeoutMs = 60000\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO schedules (id, name, saved_query_id, connection_id, cron, timezone, enabled, timeout_ms, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?)`,\n\t\tid, name, savedQueryID, connID, cron, timezone, timeoutMs, creator, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create schedule: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateSchedule updates a schedule.\nfunc (db *DB) UpdateSchedule(id, name, cron, timezone string, enabled bool, timeoutMs int) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tenabledInt := 0\n\tif enabled {\n\t\tenabledInt = 1\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE schedules SET name = ?, cron = ?, timezone = ?, enabled = ?, timeout_ms = ?, updated_at = ? 
WHERE id = ?`,\n\t\tname, cron, timezone, enabledInt, timeoutMs, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update schedule: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpdateScheduleStatus updates the last run info for a schedule.\nfunc (db *DB) UpdateScheduleStatus(id, status, lastError string, nextRunAt *time.Time) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar errVal, nextVal interface{}\n\tif lastError != \"\" {\n\t\terrVal = lastError\n\t}\n\tif nextRunAt != nil {\n\t\tnextVal = nextRunAt.UTC().Format(time.RFC3339)\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`UPDATE schedules SET last_run_at = ?, last_status = ?, last_error = ?, next_run_at = ?, updated_at = ? WHERE id = ?`,\n\t\tnow, status, errVal, nextVal, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update schedule status: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteSchedule deletes a schedule and all its runs (cascade).\nfunc (db *DB) DeleteSchedule(id string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM schedules WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete schedule: %w\", err)\n\t}\n\treturn nil\n}\n\n// CreateScheduleRun creates a new schedule run record and returns its ID.\nfunc (db *DB) CreateScheduleRun(scheduleID, status string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO schedule_runs (id, schedule_id, started_at, status, created_at)\n\t\t VALUES (?, ?, ?, ?, ?)`,\n\t\tid, scheduleID, now, status, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create schedule run: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// UpdateScheduleRun updates a schedule run with results.\nfunc (db *DB) UpdateScheduleRun(id, status string, rowsAffected, elapsedMs int, runError string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar errVal interface{}\n\tif runError != \"\" {\n\t\terrVal = runError\n\t}\n\n\t_, err := 
db.conn.Exec(\n\t\t`UPDATE schedule_runs SET finished_at = ?, status = ?, rows_affected = ?, elapsed_ms = ?, error = ? WHERE id = ?`,\n\t\tnow, status, rowsAffected, elapsedMs, errVal, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update schedule run: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetScheduleRuns retrieves runs for a schedule, most recent first.\nfunc (db *DB) GetScheduleRuns(scheduleID string, limit, offset int) ([]ScheduleRun, error) {\n\tif limit <= 0 {\n\t\tlimit = 50\n\t}\n\tif offset < 0 {\n\t\toffset = 0\n\t}\n\trows, err := db.conn.Query(\n\t\t`SELECT id, schedule_id, started_at, finished_at, status, rows_affected, elapsed_ms, error, created_at\n\t\t FROM schedule_runs WHERE schedule_id = ? ORDER BY started_at DESC LIMIT ? OFFSET ?`,\n\t\tscheduleID, limit, offset,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get schedule runs: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar runs []ScheduleRun\n\tfor rows.Next() {\n\t\tvar r ScheduleRun\n\t\tvar finishedAt, runError sql.NullString\n\t\tif err := rows.Scan(&r.ID, &r.ScheduleID, &r.StartedAt, &finishedAt, &r.Status, &r.RowsAffected, &r.ElapsedMs, &runError, &r.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan schedule run: %w\", err)\n\t\t}\n\t\tr.FinishedAt = nullStringToPtr(finishedAt)\n\t\tr.Error = nullStringToPtr(runError)\n\t\truns = append(runs, r)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate schedule run rows: %w\", err)\n\t}\n\treturn runs, nil\n}\n\n// scanSchedule is a helper for scanning schedule rows.\nfunc scanSchedule(rows *sql.Rows) (Schedule, error) {\n\tvar s Schedule\n\tvar connID, lastRun, nextRun, lastStatus, lastError, createdBy sql.NullString\n\tvar enabled int\n\n\terr := rows.Scan(&s.ID, &s.Name, &s.SavedQueryID, &connID, &s.Cron, &s.Timezone, &enabled, &s.TimeoutMs,\n\t\t&lastRun, &nextRun, &lastStatus, &lastError, &createdBy, &s.CreatedAt, &s.UpdatedAt)\n\tif err != nil {\n\t\treturn s, fmt.Errorf(\"scan 
schedule: %w\", err)\n\t}\n\n\ts.Enabled = enabled == 1\n\ts.ConnectionID = nullStringToPtr(connID)\n\ts.LastRunAt = nullStringToPtr(lastRun)\n\ts.NextRunAt = nullStringToPtr(nextRun)\n\ts.LastStatus = nullStringToPtr(lastStatus)\n\ts.LastError = nullStringToPtr(lastError)\n\ts.CreatedBy = nullStringToPtr(createdBy)\n\treturn s, nil\n}\n"
  },
  {
    "path": "internal/database/sessions.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Session represents an authenticated session.\ntype Session struct {\n\tID                string  `json:\"id\"`\n\tConnectionID      string  `json:\"connection_id\"`\n\tClickhouseUser    string  `json:\"clickhouse_user\"`\n\tEncryptedPassword string  `json:\"encrypted_password\"`\n\tToken             string  `json:\"token\"`\n\tExpiresAt         string  `json:\"expires_at\"`\n\tUserRole          *string `json:\"user_role\"`\n\tCreatedAt         string  `json:\"created_at\"`\n}\n\n// CreateSessionParams holds parameters for creating a session.\ntype CreateSessionParams struct {\n\tConnectionID      string\n\tClickhouseUser    string\n\tEncryptedPassword string\n\tToken             string\n\tExpiresAt         string\n\tUserRole          string // defaults to \"viewer\" if empty\n}\n\n// SessionUser represents an aggregated user from sessions.\ntype SessionUser struct {\n\tUsername     string `json:\"username\"`\n\tUserRole     string `json:\"user_role\"`\n\tLastLogin    string `json:\"last_login\"`\n\tSessionCount int    `json:\"session_count\"`\n}\n\n// GetSession retrieves a session by token. 
Deletes and returns nil if expired.\nfunc (db *DB) GetSession(token string) (*Session, error) {\n\trow := db.conn.QueryRow(\n\t\t\"SELECT id, connection_id, clickhouse_user, encrypted_password, token, expires_at, user_role, created_at FROM sessions WHERE token = ?\",\n\t\ttoken,\n\t)\n\n\tvar s Session\n\tvar userRole sql.NullString\n\n\terr := row.Scan(\n\t\t&s.ID, &s.ConnectionID,\n\t\t&s.ClickhouseUser, &s.EncryptedPassword, &s.Token,\n\t\t&s.ExpiresAt, &userRole, &s.CreatedAt,\n\t)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get session: %w\", err)\n\t}\n\n\ts.UserRole = nullStringToPtr(userRole)\n\n\t// Check if session has expired\n\texpiresAt, err := time.Parse(time.RFC3339, s.ExpiresAt)\n\tif err != nil {\n\t\texpiresAt, err = time.Parse(\"2006-01-02T15:04:05.000Z\", s.ExpiresAt)\n\t\tif err != nil {\n\t\t\tdb.conn.Exec(\"DELETE FROM sessions WHERE id = ?\", s.ID)\n\t\t\treturn nil, nil\n\t\t}\n\t}\n\n\tif time.Now().UTC().After(expiresAt) {\n\t\tdb.conn.Exec(\"DELETE FROM sessions WHERE id = ?\", s.ID)\n\t\treturn nil, nil\n\t}\n\n\treturn &s, nil\n}\n\n// CreateSession creates a new session and returns its ID.\nfunc (db *DB) CreateSession(params CreateSessionParams) (string, error) {\n\tid := uuid.NewString()\n\n\tuserRole := params.UserRole\n\tif userRole == \"\" {\n\t\tuserRole = \"viewer\"\n\t}\n\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO sessions (id, connection_id, clickhouse_user, encrypted_password, token, expires_at, user_role)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, params.ConnectionID,\n\t\tparams.ClickhouseUser, params.EncryptedPassword,\n\t\tparams.Token, params.ExpiresAt, userRole,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create session: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// DeleteSession deletes a session by its token.\nfunc (db *DB) DeleteSession(token string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM sessions WHERE token = ?\", token)\n\tif err != 
nil {\n\t\treturn fmt.Errorf(\"delete session: %w\", err)\n\t}\n\treturn nil\n}\n\n// SetSessionsUserRole updates the cached app role for all active/inactive sessions of a user.\nfunc (db *DB) SetSessionsUserRole(username, role string) error {\n\tif role == \"\" {\n\t\trole = \"viewer\"\n\t}\n\t_, err := db.conn.Exec(\"UPDATE sessions SET user_role = ? WHERE clickhouse_user = ?\", role, username)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"set sessions user role: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetUsers returns aggregated user data from sessions.\nfunc (db *DB) GetUsers() ([]SessionUser, error) {\n\trows, err := db.conn.Query(`\n\t\tSELECT\n\t\t\tclickhouse_user,\n\t\t\tuser_role,\n\t\t\tMAX(created_at) as last_login,\n\t\t\tCOUNT(*) as session_count\n\t\tFROM sessions\n\t\tGROUP BY clickhouse_user\n\t\tORDER BY last_login DESC\n\t`)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get users: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar users []SessionUser\n\tfor rows.Next() {\n\t\tvar u SessionUser\n\t\tvar userRole sql.NullString\n\t\tif err := rows.Scan(&u.Username, &userRole, &u.LastLogin, &u.SessionCount); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan user: %w\", err)\n\t\t}\n\t\tif userRole.Valid {\n\t\t\tu.UserRole = userRole.String\n\t\t} else {\n\t\t\tu.UserRole = \"viewer\"\n\t\t}\n\t\tusers = append(users, u)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate user rows: %w\", err)\n\t}\n\treturn users, nil\n}\n\n// GetActiveSessionsByConnection returns up to limit active sessions for a connection,\n// ordered by most recently created first.\nfunc (db *DB) GetActiveSessionsByConnection(connectionID string, limit int) ([]Session, error) {\n\tif limit <= 0 {\n\t\tlimit = 5\n\t}\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\trows, err := db.conn.Query(\n\t\t`SELECT id, connection_id, clickhouse_user, encrypted_password, token, expires_at, user_role, created_at\n\t\t FROM sessions\n\t\t WHERE connection_id = ? 
AND expires_at > ?\n\t\t ORDER BY created_at DESC\n\t\t LIMIT ?`,\n\t\tconnectionID, now, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get active sessions by connection: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tsessions := make([]Session, 0, limit)\n\tfor rows.Next() {\n\t\tvar s Session\n\t\tvar role sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&s.ID, &s.ConnectionID,\n\t\t\t&s.ClickhouseUser, &s.EncryptedPassword, &s.Token,\n\t\t\t&s.ExpiresAt, &role, &s.CreatedAt,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan active session: %w\", err)\n\t\t}\n\t\ts.UserRole = nullStringToPtr(role)\n\t\tsessions = append(sessions, s)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate active sessions: %w\", err)\n\t}\n\treturn sessions, nil\n}\n"
  },
  {
    "path": "internal/database/settings.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n)\n\n// Setting keys for governance features.\nconst (\n\tSettingGovernanceSyncEnabled     = \"governance.sync_enabled\"\n\tSettingGovernanceUpgradeBanner   = \"governance.upgrade_banner_dismissed\"\n\tSettingGovernanceSyncUpdatedBy   = \"governance.sync_updated_by\"\n\tSettingGovernanceSyncUpdatedAt   = \"governance.sync_updated_at\"\n)\n\n// GovernanceSyncEnabled reports whether admins have opted in to the governance\n// background sync. Unset keys default to false (opt-in semantics).\nfunc (db *DB) GovernanceSyncEnabled() bool {\n\tv, _ := db.GetSetting(SettingGovernanceSyncEnabled)\n\treturn strings.EqualFold(strings.TrimSpace(v), \"true\")\n}\n\n// SetGovernanceSyncEnabled stores the opt-in flag plus who/when toggled it.\nfunc (db *DB) SetGovernanceSyncEnabled(enabled bool, actor string) error {\n\tval := \"false\"\n\tif enabled {\n\t\tval = \"true\"\n\t}\n\tif err := db.SetSetting(SettingGovernanceSyncEnabled, val); err != nil {\n\t\treturn err\n\t}\n\tif err := db.SetSetting(SettingGovernanceSyncUpdatedBy, actor); err != nil {\n\t\treturn err\n\t}\n\treturn db.SetSetting(SettingGovernanceSyncUpdatedAt, time.Now().UTC().Format(time.RFC3339))\n}\n\n// GetSetting retrieves a setting value by key. 
Returns empty string if not found.\nfunc (db *DB) GetSetting(key string) (string, error) {\n\tvar value string\n\terr := db.conn.QueryRow(\"SELECT value FROM settings WHERE key = ?\", key).Scan(&value)\n\tif err == sql.ErrNoRows {\n\t\treturn \"\", nil\n\t}\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"get setting: %w\", err)\n\t}\n\treturn value, nil\n}\n\n// SetSetting sets or updates a setting value (upsert).\nfunc (db *DB) SetSetting(key, value string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO settings (key, value, updated_at) VALUES (?, ?, ?)\n\t\t ON CONFLICT(key) DO UPDATE SET value = excluded.value, updated_at = excluded.updated_at`,\n\t\tkey, value, now,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"set setting: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetAllSettings retrieves all settings as a map.\nfunc (db *DB) GetAllSettings() (map[string]string, error) {\n\trows, err := db.conn.Query(\"SELECT key, value FROM settings\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get all settings: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tsettings := make(map[string]string)\n\tfor rows.Next() {\n\t\tvar key, value string\n\t\tif err := rows.Scan(&key, &value); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan setting: %w\", err)\n\t\t}\n\t\tsettings[key] = value\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate setting rows: %w\", err)\n\t}\n\treturn settings, nil\n}\n\n// DeleteSetting removes a setting by key.\nfunc (db *DB) DeleteSetting(key string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM settings WHERE key = ?\", key)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete setting: %w\", err)\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "internal/database/user_roles.go",
    "content": "package database\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n)\n\n// UserRole represents a CH-UI role assignment for a ClickHouse user.\ntype UserRole struct {\n\tUsername  string `json:\"username\"`\n\tRole      string `json:\"role\"`\n\tCreatedAt string `json:\"created_at\"`\n}\n\n// GetUserRole retrieves the CH-UI role for a user.\n// Returns empty string if not set (meaning auto-detect from ClickHouse).\nfunc (db *DB) GetUserRole(username string) (string, error) {\n\tvar role string\n\terr := db.conn.QueryRow(\"SELECT role FROM user_roles WHERE username = ?\", username).Scan(&role)\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn \"\", nil\n\t\t}\n\t\treturn \"\", fmt.Errorf(\"get user role: %w\", err)\n\t}\n\treturn role, nil\n}\n\n// SetUserRole sets or updates the CH-UI role for a user (upsert).\nfunc (db *DB) SetUserRole(username, role string) error {\n\t_, err := db.conn.Exec(\n\t\t`INSERT INTO user_roles (username, role) VALUES (?, ?)\n\t\t ON CONFLICT(username) DO UPDATE SET role = excluded.role`,\n\t\tusername, role,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"set user role: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteUserRole removes the CH-UI role assignment for a user (reverts to auto-detect).\nfunc (db *DB) DeleteUserRole(username string) error {\n\t_, err := db.conn.Exec(\"DELETE FROM user_roles WHERE username = ?\", username)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete user role: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetAllUserRoles retrieves all CH-UI role assignments.\nfunc (db *DB) GetAllUserRoles() ([]UserRole, error) {\n\trows, err := db.conn.Query(\"SELECT username, role, created_at FROM user_roles ORDER BY username ASC\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get all user roles: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar roles []UserRole\n\tfor rows.Next() {\n\t\tvar r UserRole\n\t\tif err := rows.Scan(&r.Username, &r.Role, &r.CreatedAt); err != nil 
{\n\t\t\treturn nil, fmt.Errorf(\"scan user role: %w\", err)\n\t\t}\n\t\troles = append(roles, r)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate user role rows: %w\", err)\n\t}\n\treturn roles, nil\n}\n\n// CountUsersWithRole returns the number of users currently assigned a given CH-UI role.\nfunc (db *DB) CountUsersWithRole(role string) (int, error) {\n\tvar count int\n\terr := db.conn.QueryRow(\"SELECT COUNT(*) FROM user_roles WHERE role = ?\", role).Scan(&count)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"count users with role: %w\", err)\n\t}\n\treturn count, nil\n}\n\n// IsUserRole returns true if username currently has the given explicit role in CH-UI.\nfunc (db *DB) IsUserRole(username, role string) (bool, error) {\n\tvar exists int\n\terr := db.conn.QueryRow(\n\t\t\"SELECT 1 FROM user_roles WHERE username = ? AND role = ? LIMIT 1\",\n\t\tusername, role,\n\t).Scan(&exists)\n\tif err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, fmt.Errorf(\"is user role: %w\", err)\n\t}\n\treturn exists == 1, nil\n}\n"
  },
  {
    "path": "internal/embedded/embedded.go",
    "content": "package embedded\n\nimport (\n\t\"fmt\"\n\t\"log/slog\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/connector\"\n\tconnconfig \"github.com/caioricciuti/ch-ui/connector/config\"\n\t\"github.com/caioricciuti/ch-ui/connector/ui\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/license\"\n)\n\n// EmbeddedAgent manages an in-process tunnel connector that connects\n// to the local CH-UI server for the embedded ClickHouse connection.\ntype EmbeddedAgent struct {\n\tconn *connector.Connector\n\tdb   *database.DB\n}\n\n// Start creates the embedded connection record if needed and launches\n// the in-process connector. It should be called after the HTTP server is\n// listening so the WebSocket endpoint is available.\nfunc Start(db *database.DB, port int, clickhouseURL, connectionName string) (*EmbeddedAgent, error) {\n\tif clickhouseURL == \"\" {\n\t\tslog.Info(\"No CLICKHOUSE_URL configured, skipping embedded agent\")\n\t\treturn nil, nil\n\t}\n\tconnectionName = strings.TrimSpace(connectionName)\n\tif connectionName == \"\" {\n\t\tconnectionName = \"Local ClickHouse\"\n\t}\n\n\t// Ensure an embedded connection record exists\n\tdbConn, err := db.GetEmbeddedConnection()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"check embedded connection: %w\", err)\n\t}\n\n\tif dbConn == nil {\n\t\ttoken := license.GenerateTunnelToken()\n\t\tid, err := db.CreateConnection(connectionName, token, true)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"create embedded connection: %w\", err)\n\t\t}\n\t\tslog.Info(\"Created embedded connection\", \"id\", id)\n\n\t\tdbConn, err = db.GetConnectionByID(id)\n\t\tif err != nil || dbConn == nil {\n\t\t\treturn nil, fmt.Errorf(\"fetch created embedded connection: %w\", err)\n\t\t}\n\t} else if strings.TrimSpace(dbConn.Name) != connectionName {\n\t\tif err := db.UpdateConnectionName(dbConn.ID, connectionName); err != nil {\n\t\t\treturn nil, 
fmt.Errorf(\"update embedded connection name: %w\", err)\n\t\t}\n\t\tdbConn.Name = connectionName\n\t\tslog.Info(\"Updated embedded connection name\", \"id\", dbConn.ID, \"name\", connectionName)\n\t}\n\n\ttunnelURL := fmt.Sprintf(\"ws://127.0.0.1:%d/connect\", port)\n\n\tcfg := &connconfig.Config{\n\t\tTunnelURL:         tunnelURL,\n\t\tToken:             dbConn.TunnelToken,\n\t\tClickHouseURL:     clickhouseURL,\n\t\tTakeover:          true, // Always takeover on startup\n\t\tHeartbeatInterval: 30 * time.Second,\n\t\tReconnectDelay:    2 * time.Second,\n\t\tMaxReconnectDelay: 30 * time.Second,\n\t}\n\n\t// Use quiet mode for the embedded agent (suppresses terminal output)\n\tu := ui.New(true, true, false, false)\n\n\tconn := connector.New(cfg, u)\n\n\tea := &EmbeddedAgent{\n\t\tconn: conn,\n\t\tdb:   db,\n\t}\n\n\tgo func() {\n\t\t// Small delay to let the HTTP server start accepting connections\n\t\ttime.Sleep(500 * time.Millisecond)\n\t\tslog.Info(\"Starting embedded agent\", \"clickhouse_url\", clickhouseURL, \"tunnel_url\", tunnelURL)\n\t\tif err := conn.Run(); err != nil {\n\t\t\tslog.Error(\"Embedded agent exited with error\", \"error\", err)\n\t\t}\n\t}()\n\n\treturn ea, nil\n}\n\n// Stop gracefully shuts down the embedded agent.\nfunc (ea *EmbeddedAgent) Stop() {\n\tif ea != nil && ea.conn != nil {\n\t\tslog.Info(\"Stopping embedded connector\")\n\t\tea.conn.Shutdown()\n\t}\n}\n"
  },
  {
    "path": "internal/governance/guardrails.go",
    "content": "package governance\n\nimport (\n\t\"fmt\"\n\t\"log/slog\"\n\t\"regexp\"\n\t\"sort\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/alerts\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\nconst defaultGuardrailStaleAfter = 10 * time.Minute\n\nvar showTablesFromRe = regexp.MustCompile(`(?i)\\bSHOW\\s+TABLES\\s+(?:FROM|IN)\\s+` + tableRefPattern)\n\ntype guardrailStore interface {\n\tGetEnabledPolicies(connectionID string) ([]Policy, error)\n\tGetAccessMatrixForUser(connectionID, userName string) ([]AccessMatrixEntry, error)\n\tGetSyncState(connectionID string, syncType string) (*SyncState, error)\n\tCreateViolation(connectionID, policyID, queryLogID, user, detail, severity, detectionPhase, requestEndpoint string) (string, error)\n\tUpsertIncidentFromViolation(connectionID, sourceRef, policyName, user, severity, detail string) (string, bool, error)\n}\n\ntype alertEventWriter interface {\n\tCreateAlertEvent(connectionID *string, eventType, severity, title, message string, payload interface{}, fingerprint, sourceRef string) (string, error)\n}\n\ntype GuardrailService struct {\n\tstore      guardrailStore\n\talerts     alertEventWriter\n\tstaleAfter time.Duration\n\tnow        func() time.Time\n}\n\ntype GuardrailDecision struct {\n\tAllowed bool\n\tBlock   *GuardrailBlock\n}\n\ntype GuardrailBlock struct {\n\tPolicyID        string\n\tPolicyName      string\n\tSeverity        string\n\tEnforcementMode string\n\tViolationID     string\n\tDetail          string\n}\n\nfunc NewGuardrailService(store *Store, db *database.DB) *GuardrailService {\n\treturn &GuardrailService{\n\t\tstore:      store,\n\t\talerts:     db,\n\t\tstaleAfter: defaultGuardrailStaleAfter,\n\t\tnow:        time.Now,\n\t}\n}\n\nfunc (s *GuardrailService) EvaluateQuery(connectionID, user, queryText, requestEndpoint string) (GuardrailDecision, error) {\n\ttablesUsed := extractPolicyTablesFromQuery(queryText)\n\treturn s.evaluate(connectionID, user, 
queryText, tablesUsed, requestEndpoint)\n}\n\nfunc (s *GuardrailService) EvaluateTable(connectionID, user, databaseName, tableName, requestEndpoint string) (GuardrailDecision, error) {\n\tdb := strings.TrimSpace(databaseName)\n\ttbl := strings.TrimSpace(tableName)\n\tif db == \"\" || tbl == \"\" {\n\t\treturn GuardrailDecision{Allowed: true}, nil\n\t}\n\tqueryText := fmt.Sprintf(\"SELECT * FROM `%s`.`%s`\", db, tbl)\n\ttablesUsed := []string{db + \".\" + tbl}\n\treturn s.evaluate(connectionID, user, queryText, tablesUsed, requestEndpoint)\n}\n\nfunc (s *GuardrailService) evaluate(connectionID, user, queryText string, tablesUsed []string, requestEndpoint string) (GuardrailDecision, error) {\n\tpolicies, err := s.store.GetEnabledPolicies(connectionID)\n\tif err != nil {\n\t\treturn GuardrailDecision{}, fmt.Errorf(\"load enabled policies: %w\", err)\n\t}\n\tif len(policies) == 0 {\n\t\treturn GuardrailDecision{Allowed: true}, nil\n\t}\n\n\tuncertain, uncertainReason := s.isAccessStateUncertain(connectionID)\n\tif uncertain {\n\t\ts.emitUncertainGuardrailEvent(connectionID, user, queryText, requestEndpoint, uncertainReason)\n\t\treturn GuardrailDecision{Allowed: true}, nil\n\t}\n\n\tmatrixEntries, err := s.store.GetAccessMatrixForUser(connectionID, user)\n\tif err != nil {\n\t\ts.emitUncertainGuardrailEvent(connectionID, user, queryText, requestEndpoint, \"access matrix lookup failed\")\n\t\treturn GuardrailDecision{Allowed: true}, nil\n\t}\n\n\tuserRoles := collectUserRoles(matrixEntries)\n\tblockingPolicies := make([]Policy, 0)\n\n\tfor _, policy := range policies {\n\t\tif !policy.Enabled {\n\t\t\tcontinue\n\t\t}\n\t\tif !queryTouchesObject(tablesUsed, queryText, policy) {\n\t\t\tcontinue\n\t\t}\n\t\tif hasRole(userRoles, policy.RequiredRole) {\n\t\t\tcontinue\n\t\t}\n\t\tif normalizePolicyEnforcementMode(policy.EnforcementMode) == \"block\" {\n\t\t\tblockingPolicies = append(blockingPolicies, policy)\n\t\t}\n\t}\n\n\tif len(blockingPolicies) == 0 {\n\t\treturn 
GuardrailDecision{Allowed: true}, nil\n\t}\n\n\tselected := pickBlockingPolicy(blockingPolicies)\n\tdetail := fmt.Sprintf(\n\t\t\"Query blocked before execution: user %q touched %s without required role %q\",\n\t\tuser,\n\t\tdescribePolicyObject(selected),\n\t\tselected.RequiredRole,\n\t)\n\tseverity := normalizeGuardrailSeverity(selected.Severity)\n\n\tviolationID := \"\"\n\tcreatedViolationID, err := s.store.CreateViolation(\n\t\tconnectionID,\n\t\tselected.ID,\n\t\t\"\",\n\t\tuser,\n\t\tdetail,\n\t\tseverity,\n\t\t\"pre_exec_block\",\n\t\trequestEndpoint,\n\t)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to persist pre-exec guardrail violation\", \"connection\", connectionID, \"policy_id\", selected.ID, \"error\", err)\n\t} else {\n\t\tviolationID = createdViolationID\n\t\tif _, _, err := s.store.UpsertIncidentFromViolation(connectionID, violationID, selected.Name, user, severity, detail); err != nil {\n\t\t\tslog.Warn(\"Failed to upsert incident from pre-exec guardrail violation\", \"violation_id\", violationID, \"error\", err)\n\t\t}\n\t}\n\n\tif s.alerts != nil {\n\t\tfingerprint := fmt.Sprintf(\"policy:%s:user:%s:hash:%s\", selected.ID, user, hashNormalized(normalizeQuery(queryText)))\n\t\tpayload := map[string]interface{}{\n\t\t\t\"guardrail_status\":   \"blocked\",\n\t\t\t\"policy_id\":          selected.ID,\n\t\t\t\"policy_name\":        selected.Name,\n\t\t\t\"query_hash\":         hashNormalized(normalizeQuery(queryText)),\n\t\t\t\"request_endpoint\":   requestEndpoint,\n\t\t\t\"violation_id\":       violationID,\n\t\t\t\"violation_severity\": severity,\n\t\t\t\"detection_phase\":    \"pre_exec_block\",\n\t\t\t\"enforcement_mode\":   \"block\",\n\t\t\t\"blocked_user\":       user,\n\t\t}\n\t\tsourceRef := violationID\n\t\tif _, err := s.alerts.CreateAlertEvent(\n\t\t\t&connectionID,\n\t\t\talerts.EventTypePolicyViolation,\n\t\t\tseverity,\n\t\t\tfmt.Sprintf(\"Policy blocked query: %s\", 
strings.TrimSpace(selected.Name)),\n\t\t\tdetail,\n\t\t\tpayload,\n\t\t\tfingerprint,\n\t\t\tsourceRef,\n\t\t); err != nil {\n\t\t\tslog.Warn(\"Failed to create blocked guardrail alert event\", \"connection\", connectionID, \"policy_id\", selected.ID, \"error\", err)\n\t\t}\n\t}\n\n\treturn GuardrailDecision{\n\t\tAllowed: false,\n\t\tBlock: &GuardrailBlock{\n\t\t\tPolicyID:        selected.ID,\n\t\t\tPolicyName:      selected.Name,\n\t\t\tSeverity:        severity,\n\t\t\tEnforcementMode: \"block\",\n\t\t\tViolationID:     violationID,\n\t\t\tDetail:          detail,\n\t\t},\n\t}, nil\n}\n\nfunc (s *GuardrailService) isAccessStateUncertain(connectionID string) (bool, string) {\n\tstate, err := s.store.GetSyncState(connectionID, string(SyncAccess))\n\tif err != nil {\n\t\treturn true, \"failed to read governance access sync state\"\n\t}\n\tif state == nil {\n\t\treturn true, \"governance access sync state missing\"\n\t}\n\tif strings.EqualFold(strings.TrimSpace(state.Status), \"error\") {\n\t\treturn true, \"governance access sync state is error\"\n\t}\n\tif state.LastSyncedAt == nil || strings.TrimSpace(*state.LastSyncedAt) == \"\" {\n\t\treturn true, \"governance access sync has no successful sync timestamp\"\n\t}\n\n\tlastSyncedAt, err := time.Parse(time.RFC3339, strings.TrimSpace(*state.LastSyncedAt))\n\tif err != nil {\n\t\treturn true, \"governance access sync timestamp is invalid\"\n\t}\n\tif s.now().UTC().Sub(lastSyncedAt.UTC()) > s.staleAfter {\n\t\treturn true, \"governance access sync state is stale\"\n\t}\n\treturn false, \"\"\n}\n\nfunc (s *GuardrailService) emitUncertainGuardrailEvent(connectionID, user, queryText, requestEndpoint, reason string) {\n\tif s.alerts == nil {\n\t\treturn\n\t}\n\tqueryHash := hashNormalized(normalizeQuery(queryText))\n\tfingerprint := fmt.Sprintf(\"guardrail:uncertain:user:%s:hash:%s:endpoint:%s\", user, queryHash, requestEndpoint)\n\tpayload := map[string]interface{}{\n\t\t\"guardrail_status\": 
\"uncertain\",\n\t\t\"reason\":           reason,\n\t\t\"request_endpoint\": requestEndpoint,\n\t\t\"query_hash\":       queryHash,\n\t\t\"ch_user\":          user,\n\t}\n\tif _, err := s.alerts.CreateAlertEvent(\n\t\t&connectionID,\n\t\talerts.EventTypePolicyViolation,\n\t\talerts.SeverityWarn,\n\t\t\"Guardrail evaluation uncertain\",\n\t\t\"Guardrail pre-execution evaluation could not be trusted; query was allowed\",\n\t\tpayload,\n\t\tfingerprint,\n\t\t\"\",\n\t); err != nil {\n\t\tslog.Warn(\"Failed to create uncertain guardrail alert event\", \"connection\", connectionID, \"error\", err)\n\t}\n}\n\nfunc pickBlockingPolicy(policies []Policy) Policy {\n\tordered := make([]Policy, len(policies))\n\tcopy(ordered, policies)\n\tsort.SliceStable(ordered, func(i, j int) bool {\n\t\tleft := ordered[i]\n\t\tright := ordered[j]\n\t\tlp := guardrailSeverityPriority(left.Severity)\n\t\trp := guardrailSeverityPriority(right.Severity)\n\t\tif lp != rp {\n\t\t\treturn lp > rp\n\t\t}\n\t\tln := strings.ToLower(strings.TrimSpace(left.Name))\n\t\trn := strings.ToLower(strings.TrimSpace(right.Name))\n\t\tif ln != rn {\n\t\t\treturn ln < rn\n\t\t}\n\t\treturn strings.ToLower(strings.TrimSpace(left.ID)) < strings.ToLower(strings.TrimSpace(right.ID))\n\t})\n\treturn ordered[0]\n}\n\nfunc guardrailSeverityPriority(v string) int {\n\tswitch normalizeGuardrailSeverity(v) {\n\tcase \"critical\":\n\t\treturn 4\n\tcase \"error\":\n\t\treturn 3\n\tcase \"warn\":\n\t\treturn 2\n\tcase \"info\":\n\t\treturn 1\n\tdefault:\n\t\treturn 0\n\t}\n}\n\nfunc normalizeGuardrailSeverity(v string) string {\n\tswitch strings.ToLower(strings.TrimSpace(v)) {\n\tcase \"critical\":\n\t\treturn \"critical\"\n\tcase \"error\":\n\t\treturn \"error\"\n\tcase \"info\":\n\t\treturn \"info\"\n\tdefault:\n\t\treturn \"warn\"\n\t}\n}\n\nfunc extractPolicyTablesFromQuery(queryText string) []string {\n\tquery := normaliseWhitespace(queryText)\n\tseen := make(map[string]bool, 16)\n\tout := make([]string, 0, 
8)\n\tisShowTablesQuery := showTablesFromRe.MatchString(query)\n\taddTable := func(dbName, tableName string) {\n\t\tdbName = strings.TrimSpace(dbName)\n\t\ttableName = strings.TrimSpace(tableName)\n\t\tif tableName == \"\" {\n\t\t\treturn\n\t\t}\n\t\tkey := tableName\n\t\tval := tableName\n\t\tif dbName != \"\" {\n\t\t\tkey = strings.ToLower(dbName + \".\" + tableName)\n\t\t\tval = dbName + \".\" + tableName\n\t\t} else {\n\t\t\tkey = strings.ToLower(tableName)\n\t\t}\n\t\tif seen[key] {\n\t\t\treturn\n\t\t}\n\t\tseen[key] = true\n\t\tout = append(out, val)\n\t}\n\taddDatabase := func(dbName string) {\n\t\tdbName = strings.TrimSpace(dbName)\n\t\tif dbName == \"\" {\n\t\t\treturn\n\t\t}\n\t\tkey := strings.ToLower(dbName + \".__all_tables__\")\n\t\tif seen[key] {\n\t\t\treturn\n\t\t}\n\t\tseen[key] = true\n\t\tout = append(out, dbName+\".__all_tables__\")\n\t}\n\n\tif !isShowTablesQuery {\n\t\tfor _, src := range extractSourceTables(query) {\n\t\t\taddTable(src.Database, src.Table)\n\t\t}\n\t}\n\tif target := extractTarget(query); target != nil {\n\t\taddTable(target.Database, target.Table)\n\t}\n\tfor _, match := range showTablesFromRe.FindAllStringSubmatch(query, -1) {\n\t\tif len(match) < 2 {\n\t\t\tcontinue\n\t\t}\n\t\traw := stripBackticks(strings.TrimSpace(match[1]))\n\t\tif raw == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tparts := strings.SplitN(raw, \".\", 2)\n\t\tif len(parts) == 2 {\n\t\t\taddDatabase(stripBackticks(parts[0]))\n\t\t\tcontinue\n\t\t}\n\t\taddDatabase(stripBackticks(raw))\n\t}\n\treturn out\n}\n"
  },
  {
    "path": "internal/governance/guardrails_test.go",
    "content": "package governance\n\nimport (\n\t\"path/filepath\"\n\t\"testing\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\ntype guardrailTestContext struct {\n\tdb      *database.DB\n\tstore   *Store\n\tservice *GuardrailService\n\tconnID  string\n}\n\nfunc newGuardrailTestContext(t *testing.T) *guardrailTestContext {\n\tt.Helper()\n\n\tdbPath := filepath.Join(t.TempDir(), \"guardrails.db\")\n\tdb, err := database.Open(dbPath)\n\tif err != nil {\n\t\tt.Fatalf(\"open db: %v\", err)\n\t}\n\tt.Cleanup(func() { _ = db.Close() })\n\n\tctx := &guardrailTestContext{\n\t\tdb:     db,\n\t\tstore:  NewStore(db),\n\t\tconnID: \"conn-1\",\n\t}\n\tctx.service = NewGuardrailService(ctx.store, db)\n\n\tif _, err := db.Conn().Exec(\n\t\t`INSERT INTO connections (id, name, tunnel_token, status) VALUES (?, ?, ?, ?)`,\n\t\tctx.connID,\n\t\t\"Local\",\n\t\t\"token-1\",\n\t\t\"connected\",\n\t); err != nil {\n\t\tt.Fatalf(\"insert connection: %v\", err)\n\t}\n\n\treturn ctx\n}\n\nfunc (c *guardrailTestContext) setAccessSyncFresh(t *testing.T) {\n\tt.Helper()\n\tif err := c.store.UpsertSyncState(c.connID, string(SyncAccess), \"idle\", nil, nil, 0); err != nil {\n\t\tt.Fatalf(\"upsert access sync state: %v\", err)\n\t}\n}\n\nfunc (c *guardrailTestContext) createPolicy(t *testing.T, name, severity, mode string) string {\n\tt.Helper()\n\tid, err := c.store.CreatePolicy(\n\t\tc.connID,\n\t\tname,\n\t\t\"\",\n\t\t\"table\",\n\t\t\"db\",\n\t\t\"tbl\",\n\t\t\"\",\n\t\t\"analyst\",\n\t\tseverity,\n\t\tmode,\n\t\t\"admin\",\n\t)\n\tif err != nil {\n\t\tt.Fatalf(\"create policy: %v\", err)\n\t}\n\treturn id\n}\n\nfunc TestGuardrailsWarnPolicyAllowsExecution(t *testing.T) {\n\tctx := newGuardrailTestContext(t)\n\tctx.setAccessSyncFresh(t)\n\tctx.createPolicy(t, \"warn-policy\", \"warn\", \"warn\")\n\n\tdecision, err := ctx.service.EvaluateQuery(ctx.connID, \"alice\", \"SELECT * FROM db.tbl\", \"/api/query/run\")\n\tif err != nil {\n\t\tt.Fatalf(\"evaluate query: %v\", 
err)\n\t}\n\tif !decision.Allowed {\n\t\tt.Fatalf(\"expected query to be allowed, got blocked: %+v\", decision.Block)\n\t}\n}\n\nfunc TestGuardrailsBlockPolicyBlocksAndPersistsViolation(t *testing.T) {\n\tctx := newGuardrailTestContext(t)\n\tctx.setAccessSyncFresh(t)\n\tpolicyID := ctx.createPolicy(t, \"block-policy\", \"critical\", \"block\")\n\n\tdecision, err := ctx.service.EvaluateQuery(ctx.connID, \"alice\", \"SELECT * FROM db.tbl\", \"/api/query/run\")\n\tif err != nil {\n\t\tt.Fatalf(\"evaluate query: %v\", err)\n\t}\n\tif decision.Allowed || decision.Block == nil {\n\t\tt.Fatalf(\"expected blocked decision, got %+v\", decision)\n\t}\n\tif decision.Block.PolicyID != policyID {\n\t\tt.Fatalf(\"unexpected blocked policy: got %s want %s\", decision.Block.PolicyID, policyID)\n\t}\n\tif decision.Block.EnforcementMode != \"block\" {\n\t\tt.Fatalf(\"unexpected enforcement mode: %s\", decision.Block.EnforcementMode)\n\t}\n\n\tvar detectionPhase, requestEndpoint string\n\tif err := ctx.db.Conn().QueryRow(\n\t\t`SELECT detection_phase, COALESCE(request_endpoint, '') FROM gov_policy_violations WHERE id = ?`,\n\t\tdecision.Block.ViolationID,\n\t).Scan(&detectionPhase, &requestEndpoint); err != nil {\n\t\tt.Fatalf(\"load persisted violation: %v\", err)\n\t}\n\tif detectionPhase != \"pre_exec_block\" {\n\t\tt.Fatalf(\"unexpected detection phase: %s\", detectionPhase)\n\t}\n\tif requestEndpoint != \"/api/query/run\" {\n\t\tt.Fatalf(\"unexpected request endpoint: %s\", requestEndpoint)\n\t}\n}\n\nfunc TestGuardrailsPickDeterministicBlockingPolicy(t *testing.T) {\n\tctx := newGuardrailTestContext(t)\n\tctx.setAccessSyncFresh(t)\n\tctx.createPolicy(t, \"zzz\", \"warn\", \"block\")\n\texpected := ctx.createPolicy(t, \"aaa\", \"warn\", \"block\")\n\tctx.createPolicy(t, \"high\", \"critical\", \"warn\")\n\n\tdecision, err := ctx.service.EvaluateQuery(ctx.connID, \"alice\", \"SELECT * FROM db.tbl\", \"/api/query/run\")\n\tif err != nil {\n\t\tt.Fatalf(\"evaluate query: %v\", 
err)\n\t}\n\tif decision.Allowed || decision.Block == nil {\n\t\tt.Fatalf(\"expected blocked decision, got %+v\", decision)\n\t}\n\tif decision.Block.PolicyID != expected {\n\t\tt.Fatalf(\"expected lexical tiebreak policy %s, got %s\", expected, decision.Block.PolicyID)\n\t}\n}\n\nfunc TestGuardrailsUncertainAccessStateAllowsAndEmitsAlert(t *testing.T) {\n\tctx := newGuardrailTestContext(t)\n\tctx.createPolicy(t, \"block-policy\", \"warn\", \"block\")\n\n\tdecision, err := ctx.service.EvaluateQuery(ctx.connID, \"alice\", \"SELECT * FROM db.tbl\", \"/api/query/run\")\n\tif err != nil {\n\t\tt.Fatalf(\"evaluate query: %v\", err)\n\t}\n\tif !decision.Allowed {\n\t\tt.Fatalf(\"expected allowed decision when access sync state is uncertain\")\n\t}\n\n\tvar count int\n\tif err := ctx.db.Conn().QueryRow(\n\t\t`SELECT COUNT(*) FROM alert_events WHERE event_type = ? AND title = ?`,\n\t\t\"policy.violation\",\n\t\t\"Guardrail evaluation uncertain\",\n\t).Scan(&count); err != nil {\n\t\tt.Fatalf(\"count uncertain guardrail alerts: %v\", err)\n\t}\n\tif count != 1 {\n\t\tt.Fatalf(\"expected 1 uncertain guardrail alert event, got %d\", count)\n\t}\n}\n\nfunc TestExtractPolicyTablesFromQuery(t *testing.T) {\n\ttests := []struct {\n\t\tname  string\n\t\tquery string\n\t\twant  []string\n\t}{\n\t\t{name: \"select join\", query: \"SELECT * FROM db.tbl a JOIN db2.tbl2 b ON a.id=b.id\", want: []string{\"db.tbl\", \"db2.tbl2\"}},\n\t\t{name: \"insert select\", query: \"INSERT INTO db.target SELECT * FROM db.source\", want: []string{\"db.source\", \"db.target\"}},\n\t\t{name: \"show tables from\", query: \"SHOW TABLES FROM db\", want: []string{\"db.__all_tables__\"}},\n\t}\n\n\tfor _, tc := range tests {\n\t\tt.Run(tc.name, func(t *testing.T) {\n\t\t\tgot := extractPolicyTablesFromQuery(tc.query)\n\t\t\tif len(got) != len(tc.want) {\n\t\t\t\tt.Fatalf(\"unexpected result size: got=%v want=%v\", got, tc.want)\n\t\t\t}\n\t\t\tfor i := range tc.want {\n\t\t\t\tif got[i] != tc.want[i] 
{\n\t\t\t\t\tt.Fatalf(\"unexpected table at %d: got=%s want=%s\", i, got[i], tc.want[i])\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "internal/governance/harvester_access.go",
    "content": "package governance\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Default number of inactive days to consider a permission as \"over-granted\".\nconst overPermissionInactiveDays = 30\n\n// syncAccess harvests user, role, grant, and role_grant data from ClickHouse\n// system tables, upserts them into SQLite, and rebuilds the access matrix.\nfunc (s *Syncer) syncAccess(ctx context.Context, creds CHCredentials) (*AccessSyncResult, error) {\n\tconnID := creds.ConnectionID\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t// Update sync state to running\n\tif err := s.store.UpsertSyncState(connID, string(SyncAccess), \"running\", nil, nil, 0); err != nil {\n\t\tslog.Error(\"Failed to update sync state\", \"error\", err)\n\t}\n\n\tresult := &AccessSyncResult{}\n\tvar syncErr error\n\n\tdefer func() {\n\t\tstatus := \"idle\"\n\t\tvar errMsg *string\n\t\tif syncErr != nil {\n\t\t\tstatus = \"error\"\n\t\t\te := syncErr.Error()\n\t\t\terrMsg = &e\n\t\t}\n\t\ttotalRows := result.UsersSynced + result.RolesSynced + result.GrantsSynced\n\t\tif err := s.store.UpsertSyncState(connID, string(SyncAccess), status, &now, errMsg, totalRows); err != nil {\n\t\t\tslog.Error(\"Failed to update sync state after access sync\", \"error\", err)\n\t\t}\n\t}()\n\n\t// ── Phase 1: Users ──────────────────────────────────────────────────────\n\tuserRows, err := s.executeQuery(creds,\n\t\t`SELECT\n\t\t\tname,\n\t\t\ttoString(auth_type) AS auth_type,\n\t\t\ttoString(host_ip) AS host_ip,\n\t\t\tdefault_roles_all,\n\t\t\ttoString(default_roles_list) AS default_roles_list\n\t\t FROM system.users\n\t\t ORDER BY name`)\n\tif err != nil {\n\t\tslog.Warn(\"Access sync: failed to query system.users with role fields, trying fallback\", \"connection\", connID, \"error\", err)\n\t\tuserRows, err = s.executeQuery(creds,\n\t\t\t`SELECT\n\t\t\t\tname,\n\t\t\t\ttoString(auth_type) AS auth_type,\n\t\t\t\ttoString(host_ip) AS 
host_ip,\n\t\t\t\t0 AS default_roles_all,\n\t\t\t\t'' AS default_roles_list\n\t\t\t FROM system.users\n\t\t\t ORDER BY name`)\n\t\tif err != nil {\n\t\t\tslog.Warn(\"Access sync: fallback query for system.users failed\", \"connection\", connID, \"error\", err)\n\t\t\tuserRows = nil\n\t\t}\n\t} else {\n\t\t// no-op; rows handled below\n\t}\n\n\tusersFetched := err == nil\n\tif usersFetched {\n\t\tif err := s.store.DeleteChUsersForConnection(connID); err != nil {\n\t\t\tsyncErr = err\n\t\t\treturn nil, syncErr\n\t\t}\n\n\t\tfor _, row := range userRows {\n\t\t\tname := fmt.Sprintf(\"%v\", row[\"name\"])\n\n\t\t\tvar defaultRoles *string\n\t\t\tif allRoles, ok := row[\"default_roles_all\"]; ok && fmt.Sprintf(\"%v\", allRoles) == \"1\" {\n\t\t\t\tdr := \"ALL\"\n\t\t\t\tdefaultRoles = &dr\n\t\t\t} else if roleList, ok := row[\"default_roles_list\"]; ok {\n\t\t\t\trl := fmt.Sprintf(\"%v\", roleList)\n\t\t\t\tif rl != \"\" && rl != \"<nil>\" && rl != \"[]\" {\n\t\t\t\t\tdefaultRoles = &rl\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tauthType := toStringPtr(row[\"auth_type\"])\n\t\t\thostIP := toStringPtr(row[\"host_ip\"])\n\n\t\t\tif err := s.store.UpsertChUser(ChUser{\n\t\t\t\tID:           uuid.NewString(),\n\t\t\t\tConnectionID: connID,\n\t\t\t\tName:         name,\n\t\t\t\tAuthType:     authType,\n\t\t\t\tHostIP:       hostIP,\n\t\t\t\tDefaultRoles: defaultRoles,\n\t\t\t\tFirstSeen:    now,\n\t\t\t\tLastUpdated:  now,\n\t\t\t}); err != nil {\n\t\t\t\tslog.Error(\"Failed to upsert CH user\", \"name\", name, \"error\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tresult.UsersSynced++\n\t\t}\n\t}\n\n\tif !usersFetched {\n\t\tslog.Warn(\"Access sync: keeping previous cached users because source query failed\", \"connection\", connID)\n\t}\n\n\t// ── Phase 2: Roles ──────────────────────────────────────────────────────\n\troleRows, err := s.executeQuery(creds,\n\t\t`SELECT name FROM system.roles ORDER BY name`)\n\tif err != nil {\n\t\tslog.Warn(\"Access sync: failed to query system.roles\", 
\"connection\", connID, \"error\", err)\n\t} else {\n\t\tif err := s.store.DeleteChRolesForConnection(connID); err != nil {\n\t\t\tsyncErr = err\n\t\t\treturn nil, syncErr\n\t\t}\n\n\t\tfor _, row := range roleRows {\n\t\t\tname := fmt.Sprintf(\"%v\", row[\"name\"])\n\n\t\t\tif err := s.store.UpsertChRole(ChRole{\n\t\t\t\tID:           uuid.NewString(),\n\t\t\t\tConnectionID: connID,\n\t\t\t\tName:         name,\n\t\t\t\tFirstSeen:    now,\n\t\t\t\tLastUpdated:  now,\n\t\t\t}); err != nil {\n\t\t\t\tslog.Error(\"Failed to upsert CH role\", \"name\", name, \"error\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tresult.RolesSynced++\n\t\t}\n\t}\n\n\t// ── Phase 3: Role grants ────────────────────────────────────────────────\n\troleGrantRows, err := s.executeQuery(creds,\n\t\t`SELECT\n\t\t\tuser_name,\n\t\t\tgranted_role_name,\n\t\t\tgranted_role_is_default,\n\t\t\twith_admin_option\n\t\t FROM system.role_grants\n\t\t ORDER BY user_name, granted_role_name`)\n\tif err != nil {\n\t\tslog.Warn(\"Access sync: failed to query system.role_grants\", \"connection\", connID, \"error\", err)\n\t} else {\n\t\tif err := s.store.DeleteRoleGrantsForConnection(connID); err != nil {\n\t\t\tsyncErr = err\n\t\t\treturn nil, syncErr\n\t\t}\n\n\t\tfor _, row := range roleGrantRows {\n\t\t\tuserName := fmt.Sprintf(\"%v\", row[\"user_name\"])\n\t\t\troleName := fmt.Sprintf(\"%v\", row[\"granted_role_name\"])\n\t\t\tisDefault := toBool(row[\"granted_role_is_default\"])\n\t\t\twithAdmin := toBool(row[\"with_admin_option\"])\n\n\t\t\tif err := s.store.UpsertRoleGrant(RoleGrant{\n\t\t\t\tID:              uuid.NewString(),\n\t\t\t\tConnectionID:    connID,\n\t\t\t\tUserName:        userName,\n\t\t\t\tGrantedRoleName: roleName,\n\t\t\t\tIsDefault:       isDefault,\n\t\t\t\tWithAdminOption: withAdmin,\n\t\t\t\tFirstSeen:       now,\n\t\t\t\tLastUpdated:     now,\n\t\t\t}); err != nil {\n\t\t\t\tslog.Error(\"Failed to upsert role grant\", \"user\", userName, \"role\", roleName, \"error\", 
err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Phase 4: Grants (privileges) ────────────────────────────────────────\n\tgrantRows, err := s.executeQuery(creds,\n\t\t`SELECT\n\t\t\tuser_name,\n\t\t\trole_name,\n\t\t\taccess_type,\n\t\t\tdatabase AS grant_database,\n\t\t\ttable AS grant_table,\n\t\t\tcolumn AS grant_column,\n\t\t\tis_partial_revoke,\n\t\t\tgrant_option\n\t\t FROM system.grants\n\t\t ORDER BY user_name, role_name, access_type`)\n\tif err != nil {\n\t\tslog.Warn(\"Access sync: failed to query system.grants\", \"connection\", connID, \"error\", err)\n\t} else {\n\t\tif err := s.store.DeleteGrantsForConnection(connID); err != nil {\n\t\t\tsyncErr = err\n\t\t\treturn nil, syncErr\n\t\t}\n\n\t\tfor _, row := range grantRows {\n\t\t\tgrant := Grant{\n\t\t\t\tID:              uuid.NewString(),\n\t\t\t\tConnectionID:    connID,\n\t\t\t\tUserName:        toStringPtr(row[\"user_name\"]),\n\t\t\t\tRoleName:        toStringPtr(row[\"role_name\"]),\n\t\t\t\tAccessType:      fmt.Sprintf(\"%v\", row[\"access_type\"]),\n\t\t\t\tGrantDatabase:   toStringPtr(row[\"grant_database\"]),\n\t\t\t\tGrantTable:      toStringPtr(row[\"grant_table\"]),\n\t\t\t\tGrantColumn:     toStringPtr(row[\"grant_column\"]),\n\t\t\t\tIsPartialRevoke: toBool(row[\"is_partial_revoke\"]),\n\t\t\t\tGrantOption:     toBool(row[\"grant_option\"]),\n\t\t\t\tFirstSeen:       now,\n\t\t\t\tLastUpdated:     now,\n\t\t\t}\n\n\t\t\tif err := s.store.UpsertGrant(grant); err != nil {\n\t\t\t\tslog.Error(\"Failed to upsert grant\",\n\t\t\t\t\t\"user\", grant.UserName,\n\t\t\t\t\t\"role\", grant.RoleName,\n\t\t\t\t\t\"access_type\", grant.AccessType,\n\t\t\t\t\t\"error\", err,\n\t\t\t\t)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tresult.GrantsSynced++\n\t\t}\n\t}\n\n\t// ── Phase 5: Rebuild access matrix ──────────────────────────────────────\n\tmatrixCount, err := s.store.RebuildAccessMatrix(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to rebuild access matrix\", \"connection\", connID, \"error\", 
err)\n\t} else {\n\t\tresult.MatrixEntries = matrixCount\n\t}\n\n\t// ── Phase 6: Count over-permissions ─────────────────────────────────────\n\toverPerms, err := s.store.GetOverPermissionsWithDays(connID, overPermissionInactiveDays)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to count over-permissions\", \"connection\", connID, \"error\", err)\n\t} else {\n\t\tresult.OverPermissions = len(overPerms)\n\t}\n\n\tslog.Info(\"Access sync completed\",\n\t\t\"connection\", connID,\n\t\t\"users\", result.UsersSynced,\n\t\t\"roles\", result.RolesSynced,\n\t\t\"grants\", result.GrantsSynced,\n\t\t\"matrix_entries\", result.MatrixEntries,\n\t\t\"over_permissions\", result.OverPermissions,\n\t)\n\n\treturn result, nil\n}\n\n// toBool converts an interface{} to bool. Handles ClickHouse-style values:\n// 0/1 (as float64 or string), true/false, etc.\nfunc toBool(v interface{}) bool {\n\tif v == nil {\n\t\treturn false\n\t}\n\tswitch val := v.(type) {\n\tcase bool:\n\t\treturn val\n\tcase float64:\n\t\treturn val != 0\n\tcase int64:\n\t\treturn val != 0\n\tcase int:\n\t\treturn val != 0\n\tcase string:\n\t\treturn val == \"1\" || val == \"true\" || val == \"True\"\n\tdefault:\n\t\treturn fmt.Sprintf(\"%v\", v) == \"1\"\n\t}\n}\n"
  },
  {
    "path": "internal/governance/harvester_metadata.go",
    "content": "package governance\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"strconv\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// syncMetadata harvests database/table/column metadata from ClickHouse system tables,\n// diffs against existing SQLite state, and records schema changes.\nfunc (s *Syncer) syncMetadata(ctx context.Context, creds CHCredentials) (*MetadataSyncResult, error) {\n\tconnID := creds.ConnectionID\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t// Update sync state to running\n\tif err := s.store.UpsertSyncState(connID, string(SyncMetadata), \"running\", nil, nil, 0); err != nil {\n\t\tslog.Error(\"Failed to update sync state\", \"error\", err)\n\t}\n\n\tresult := &MetadataSyncResult{}\n\tvar syncErr error\n\n\tdefer func() {\n\t\tstatus := \"idle\"\n\t\tvar errMsg *string\n\t\tif syncErr != nil {\n\t\t\tstatus = \"error\"\n\t\t\te := syncErr.Error()\n\t\t\terrMsg = &e\n\t\t}\n\t\trowCount := result.DatabasesSynced + result.TablesSynced + result.ColumnsSynced\n\t\tif err := s.store.UpsertSyncState(connID, string(SyncMetadata), status, &now, errMsg, rowCount); err != nil {\n\t\t\tslog.Error(\"Failed to update sync state after metadata sync\", \"error\", err)\n\t\t}\n\t}()\n\n\t// ── Phase 1: Databases ──────────────────────────────────────────────────\n\tdbRows, err := s.executeQuery(creds,\n\t\t`SELECT name, engine FROM system.databases\n\t\t WHERE name NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')\n\t\t ORDER BY name`)\n\tif err != nil {\n\t\tslog.Warn(\"Metadata sync: failed to query databases\", \"connection\", connID, \"error\", err)\n\t\tsyncErr = fmt.Errorf(\"databases query failed: %w\", err)\n\t\treturn result, syncErr\n\t}\n\n\texistingDBs, err := s.store.GetDatabases(connID)\n\tif err != nil {\n\t\tsyncErr = fmt.Errorf(\"failed to load existing databases: %w\", err)\n\t\treturn result, syncErr\n\t}\n\texistingDBMap := make(map[string]*GovDatabase, 
len(existingDBs))\n\tfor i := range existingDBs {\n\t\texistingDBMap[existingDBs[i].Name] = &existingDBs[i]\n\t}\n\n\tseenDBs := make(map[string]bool)\n\tfor _, row := range dbRows {\n\t\tname := fmt.Sprintf(\"%v\", row[\"name\"])\n\t\tengine := fmt.Sprintf(\"%v\", row[\"engine\"])\n\t\tseenDBs[name] = true\n\n\t\t_, found := existingDBMap[name]\n\t\tif err := s.store.UpsertDatabase(GovDatabase{\n\t\t\tID:           uuid.NewString(),\n\t\t\tConnectionID: connID,\n\t\t\tName:         name,\n\t\t\tEngine:       engine,\n\t\t\tFirstSeen:    now,\n\t\t\tLastUpdated:  now,\n\t\t}); err != nil {\n\t\t\tslog.Error(\"Failed to upsert database\", \"name\", name, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\tif !found {\n\t\t\ts.store.CreateSchemaChange(connID, ChangeDatabaseAdded, name, \"\", \"\", \"\", name)\n\t\t\tresult.SchemaChanges++\n\t\t}\n\t\tresult.DatabasesSynced++\n\t}\n\n\t// Mark removed databases\n\tfor name, existing := range existingDBMap {\n\t\tif !seenDBs[name] && !existing.IsDeleted {\n\t\t\tif err := s.store.MarkDatabaseDeleted(connID, name); err != nil {\n\t\t\t\tslog.Error(\"Failed to mark database deleted\", \"name\", name, \"error\", err)\n\t\t\t}\n\t\t\ts.store.CreateSchemaChange(connID, ChangeDatabaseRemoved, name, \"\", \"\", name, \"\")\n\t\t\tresult.SchemaChanges++\n\t\t}\n\t}\n\n\t// ── Phase 2: Tables with stats ──────────────────────────────────────────\n\ttableRows, err := s.executeQuery(creds,\n\t\t`SELECT\n\t\t\tt.database AS database_name,\n\t\t\tt.name AS table_name,\n\t\t\tt.engine AS engine,\n\t\t\tt.uuid AS table_uuid,\n\t\t\tCOALESCE(sum(p.rows), 0) AS total_rows,\n\t\t\tCOALESCE(sum(p.bytes_on_disk), 0) AS total_bytes,\n\t\t\tCOALESCE(count(DISTINCT p.partition), 0) AS partition_count\n\t\t FROM system.tables t\n\t\t LEFT JOIN system.parts p ON p.database = t.database AND p.table = t.name AND p.active = 1\n\t\t WHERE t.database NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')\n\t\t GROUP BY t.database, t.name, 
t.engine, t.uuid\n\t\t ORDER BY t.database, t.name`)\n\tif err != nil {\n\t\tslog.Warn(\"Metadata sync: failed to query tables\", \"connection\", connID, \"error\", err)\n\t\t// Continue — tables query failure is non-fatal\n\t} else {\n\t\texistingTables, err := s.store.GetTables(connID)\n\t\tif err != nil {\n\t\t\tslog.Error(\"Failed to load existing tables\", \"error\", err)\n\t\t}\n\t\texistingTableMap := make(map[string]*GovTable)\n\t\tfor i := range existingTables {\n\t\t\tkey := existingTables[i].DatabaseName + \".\" + existingTables[i].TableName\n\t\t\texistingTableMap[key] = &existingTables[i]\n\t\t}\n\n\t\tseenTables := make(map[string]bool)\n\t\tfor _, row := range tableRows {\n\t\t\tdbName := fmt.Sprintf(\"%v\", row[\"database_name\"])\n\t\t\ttableName := fmt.Sprintf(\"%v\", row[\"table_name\"])\n\t\t\tengine := fmt.Sprintf(\"%v\", row[\"engine\"])\n\t\t\ttableUUID := fmt.Sprintf(\"%v\", row[\"table_uuid\"])\n\t\t\ttotalRows := toInt64(row[\"total_rows\"])\n\t\t\ttotalBytes := toInt64(row[\"total_bytes\"])\n\t\t\tpartCount := int(toInt64(row[\"partition_count\"]))\n\t\t\tkey := dbName + \".\" + tableName\n\t\t\tseenTables[key] = true\n\n\t\t\t_, found := existingTableMap[key]\n\t\t\tif err := s.store.UpsertTable(GovTable{\n\t\t\t\tID:             uuid.NewString(),\n\t\t\t\tConnectionID:   connID,\n\t\t\t\tDatabaseName:   dbName,\n\t\t\t\tTableName:      tableName,\n\t\t\t\tEngine:         engine,\n\t\t\t\tTableUUID:      tableUUID,\n\t\t\t\tTotalRows:      totalRows,\n\t\t\t\tTotalBytes:     totalBytes,\n\t\t\t\tPartitionCount: partCount,\n\t\t\t\tFirstSeen:      now,\n\t\t\t\tLastUpdated:    now,\n\t\t\t}); err != nil {\n\t\t\t\tslog.Error(\"Failed to upsert table\", \"table\", key, \"error\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif !found {\n\t\t\t\ts.store.CreateSchemaChange(connID, ChangeTableAdded, dbName, tableName, \"\", \"\", tableName)\n\t\t\t\tresult.SchemaChanges++\n\t\t\t}\n\t\t\tresult.TablesSynced++\n\t\t}\n\n\t\t// Mark removed 
tables\n\t\tfor key, existing := range existingTableMap {\n\t\t\tif !seenTables[key] && !existing.IsDeleted {\n\t\t\t\tif err := s.store.MarkTableDeleted(connID, existing.DatabaseName, existing.TableName); err != nil {\n\t\t\t\t\tslog.Error(\"Failed to mark table deleted\", \"table\", key, \"error\", err)\n\t\t\t\t}\n\t\t\t\ts.store.CreateSchemaChange(connID, ChangeTableRemoved, existing.DatabaseName, existing.TableName, \"\", existing.TableName, \"\")\n\t\t\t\tresult.SchemaChanges++\n\t\t\t}\n\t\t}\n\t}\n\n\t// ── Phase 3: Columns ────────────────────────────────────────────────────\n\tcolRows, err := s.executeQuery(creds,\n\t\t`SELECT\n\t\t\tdatabase AS database_name,\n\t\t\ttable AS table_name,\n\t\t\tname AS column_name,\n\t\t\ttype AS column_type,\n\t\t\tposition AS column_position,\n\t\t\tdefault_kind,\n\t\t\tdefault_expression,\n\t\t\tcomment\n\t\t FROM system.columns\n\t\t WHERE database NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')\n\t\t ORDER BY database, table, position`)\n\tif err != nil {\n\t\tslog.Warn(\"Metadata sync: failed to query columns\", \"connection\", connID, \"error\", err)\n\t} else {\n\t\texistingColMap := make(map[string]*GovColumn)\n\t\ttables, tblErr := s.store.GetTables(connID)\n\t\tif tblErr == nil {\n\t\t\tfor _, tbl := range tables {\n\t\t\t\tcols, colErr := s.store.GetColumns(connID, tbl.DatabaseName, tbl.TableName)\n\t\t\t\tif colErr != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfor i := range cols {\n\t\t\t\t\tkey := cols[i].DatabaseName + \".\" + cols[i].TableName + \".\" + cols[i].ColumnName\n\t\t\t\t\texistingColMap[key] = &cols[i]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tseenCols := make(map[string]bool)\n\t\tfor _, row := range colRows {\n\t\t\tdbName := fmt.Sprintf(\"%v\", row[\"database_name\"])\n\t\t\ttableName := fmt.Sprintf(\"%v\", row[\"table_name\"])\n\t\t\tcolName := fmt.Sprintf(\"%v\", row[\"column_name\"])\n\t\t\tcolType := fmt.Sprintf(\"%v\", row[\"column_type\"])\n\t\t\tposition := 
int(toInt64(row[\"column_position\"]))\n\t\t\tkey := dbName + \".\" + tableName + \".\" + colName\n\t\t\tseenCols[key] = true\n\n\t\t\tdefaultKind := toStringPtr(row[\"default_kind\"])\n\t\t\tdefaultExpr := toStringPtr(row[\"default_expression\"])\n\t\t\tcomment := toStringPtr(row[\"comment\"])\n\n\t\t\texisting, found := existingColMap[key]\n\t\t\tif err := s.store.UpsertColumn(GovColumn{\n\t\t\t\tID:                uuid.NewString(),\n\t\t\t\tConnectionID:      connID,\n\t\t\t\tDatabaseName:      dbName,\n\t\t\t\tTableName:         tableName,\n\t\t\t\tColumnName:        colName,\n\t\t\t\tColumnType:        colType,\n\t\t\t\tColumnPosition:    position,\n\t\t\t\tDefaultKind:       defaultKind,\n\t\t\t\tDefaultExpression: defaultExpr,\n\t\t\t\tComment:           comment,\n\t\t\t\tFirstSeen:         now,\n\t\t\t\tLastUpdated:       now,\n\t\t\t}); err != nil {\n\t\t\t\tslog.Error(\"Failed to upsert column\", \"column\", key, \"error\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif !found {\n\t\t\t\ts.store.CreateSchemaChange(connID, ChangeColumnAdded, dbName, tableName, colName, \"\", colName)\n\t\t\t\tresult.SchemaChanges++\n\t\t\t} else if existing.ColumnType != colType {\n\t\t\t\ts.store.CreateSchemaChange(connID, ChangeColumnTypeChanged, dbName, tableName, colName, existing.ColumnType, colType)\n\t\t\t\tresult.SchemaChanges++\n\t\t\t}\n\t\t\tresult.ColumnsSynced++\n\t\t}\n\n\t\t// Mark removed columns\n\t\tfor key, existing := range existingColMap {\n\t\t\tif !seenCols[key] && !existing.IsDeleted {\n\t\t\t\tif err := s.store.MarkColumnDeleted(connID, existing.DatabaseName, existing.TableName, existing.ColumnName); err != nil {\n\t\t\t\t\tslog.Error(\"Failed to mark column deleted\", \"column\", key, \"error\", err)\n\t\t\t\t}\n\t\t\t\ts.store.CreateSchemaChange(connID, ChangeColumnRemoved, existing.DatabaseName, existing.TableName, existing.ColumnName, existing.ColumnName, \"\")\n\t\t\t\tresult.SchemaChanges++\n\t\t\t}\n\t\t}\n\t}\n\n\tslog.Info(\"Metadata sync 
completed\",\n\t\t\"connection\", connID,\n\t\t\"databases\", result.DatabasesSynced,\n\t\t\"tables\", result.TablesSynced,\n\t\t\"columns\", result.ColumnsSynced,\n\t\t\"changes\", result.SchemaChanges,\n\t)\n\n\treturn result, nil\n}\n\n// toInt64 converts interface{} values (float64, int64, int, string, json.Number) to int64.\nfunc toInt64(v interface{}) int64 {\n\tif v == nil {\n\t\treturn 0\n\t}\n\tswitch val := v.(type) {\n\tcase float64:\n\t\treturn int64(val)\n\tcase int64:\n\t\treturn val\n\tcase int:\n\t\treturn int64(val)\n\tcase json.Number:\n\t\tn, _ := val.Int64()\n\t\treturn n\n\tcase string:\n\t\tn, _ := strconv.ParseInt(val, 10, 64)\n\t\treturn n\n\tdefault:\n\t\ts := fmt.Sprintf(\"%v\", v)\n\t\tn, _ := strconv.ParseInt(s, 10, 64)\n\t\treturn n\n\t}\n}\n\n// toStringPtr converts interface{} to *string. Returns nil for nil inputs, empty strings, or the formatted \"<nil>\" value.\nfunc toStringPtr(v interface{}) *string {\n\tif v == nil {\n\t\treturn nil\n\t}\n\ts := fmt.Sprintf(\"%v\", v)\n\tif s == \"\" || s == \"<nil>\" {\n\t\treturn nil\n\t}\n\treturn &s\n}\n"
  },
  {
    "path": "internal/governance/harvester_querylog.go",
    "content": "package governance\n\nimport (\n\t\"context\"\n\t\"crypto/sha256\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"regexp\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/alerts\"\n\t\"github.com/google/uuid\"\n)\n\nconst queryLogBatchLimit = 5000\nconst defaultQueryLogWatermark = \"2000-01-01 00:00:00\"\n\n// syncQueryLog harvests recent queries from system.query_log, classifies them,\n// extracts lineage, and evaluates access policies.\nfunc (s *Syncer) syncQueryLog(ctx context.Context, creds CHCredentials) (*QueryLogSyncResult, error) {\n\tconnID := creds.ConnectionID\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t// Update sync state to running\n\tif err := s.store.UpsertSyncState(connID, string(SyncQueryLog), \"running\", nil, nil, 0); err != nil {\n\t\tslog.Error(\"Failed to update sync state\", \"error\", err)\n\t}\n\n\tresult := &QueryLogSyncResult{}\n\tvar syncErr error\n\n\tdefer func() {\n\t\tstatus := \"idle\"\n\t\tvar errMsg *string\n\t\tif syncErr != nil {\n\t\t\tstatus = \"error\"\n\t\t\te := syncErr.Error()\n\t\t\terrMsg = &e\n\t\t}\n\t\twatermark := result.NewWatermark\n\t\tif watermark == \"\" {\n\t\t\twatermark = now\n\t\t}\n\t\tif err := s.store.UpsertSyncState(connID, string(SyncQueryLog), status, &watermark, errMsg, result.QueriesIngested); err != nil {\n\t\t\tslog.Error(\"Failed to update sync state after query log sync\", \"error\", err)\n\t\t}\n\t}()\n\n\t// Get current watermark from sync state\n\twatermark := defaultQueryLogWatermark\n\tstate, err := s.store.GetSyncState(connID, string(SyncQueryLog))\n\tif err == nil && state != nil && state.Watermark != nil && *state.Watermark != \"\" {\n\t\twatermark = sanitizeQueryLogWatermark(*state.Watermark)\n\t}\n\n\t// Query system.query_log for finished queries since watermark\n\tsql := fmt.Sprintf(`SELECT\n\t\tquery_id,\n\t\tuser AS 
ch_user,\n\t\tquery,\n\t\tevent_time,\n\t\tquery_duration_ms,\n\t\tread_rows,\n\t\tread_bytes,\n\t\tresult_rows,\n\t\twritten_rows,\n\t\twritten_bytes,\n\t\tmemory_usage,\n\t\ttables,\n\t\texception_code,\n\t\texception\n\tFROM system.query_log\n\tWHERE type = 'QueryFinish'\n\t  AND is_initial_query = 1\n\t  AND event_time > parseDateTimeBestEffort('%s')\n\t  AND query_duration_ms >= 10\n\t  AND query NOT LIKE '%%system.query_log%%'\n\t  AND query NOT LIKE '%%system.tables%%'\n\t  AND query NOT LIKE '%%system.columns%%'\n\t  AND query NOT LIKE '%%system.grants%%'\n\tORDER BY event_time ASC\n\tLIMIT %d`, watermark, queryLogBatchLimit)\n\n\trows, err := s.executeQuery(creds, sql)\n\tif err != nil {\n\t\t// Fallback for older CH setups where the \"tables\" column may be unavailable.\n\t\tfallbackSQL := fmt.Sprintf(`SELECT\n\t\tquery_id,\n\t\tuser AS ch_user,\n\t\tquery,\n\t\tevent_time,\n\t\tquery_duration_ms,\n\t\tread_rows,\n\t\tread_bytes,\n\t\tresult_rows,\n\t\twritten_rows,\n\t\twritten_bytes,\n\t\tmemory_usage,\n\t\tCAST([], 'Array(String)') AS tables,\n\t\texception_code,\n\t\texception\n\tFROM system.query_log\n\tWHERE type = 'QueryFinish'\n\t  AND is_initial_query = 1\n\t  AND event_time > parseDateTimeBestEffort('%s')\n\t  AND query_duration_ms >= 10\n\t  AND query NOT LIKE '%%system.query_log%%'\n\t  AND query NOT LIKE '%%system.tables%%'\n\t  AND query NOT LIKE '%%system.columns%%'\n\t  AND query NOT LIKE '%%system.grants%%'\n\tORDER BY event_time ASC\n\tLIMIT %d`, watermark, queryLogBatchLimit)\n\n\t\trows, err = s.executeQuery(creds, fallbackSQL)\n\t\tif err != nil {\n\t\t\tsyncErr = fmt.Errorf(\"query_log query failed: %w\", err)\n\t\t\treturn result, syncErr\n\t\t}\n\t}\n\n\tif len(rows) == 0 {\n\t\tresult.NewWatermark = watermark\n\t\treturn result, nil\n\t}\n\n\t// Build QueryLogEntry batch\n\tvar entries []QueryLogEntry\n\tvar latestEventTime string\n\n\tfor _, row := range rows {\n\t\tqueryText := fmt.Sprintf(\"%v\", row[\"query\"])\n\t\tnormalized 
:= normalizeQuery(queryText)\n\t\thash := hashNormalized(normalized)\n\t\tkind := classifyQuery(queryText)\n\t\teventTime := fmt.Sprintf(\"%v\", row[\"event_time\"])\n\n\t\tisError := false\n\t\tvar errorMsg *string\n\t\tif exCode := toInt64(row[\"exception_code\"]); exCode != 0 {\n\t\t\tisError = true\n\t\t\tex := fmt.Sprintf(\"%v\", row[\"exception\"])\n\t\t\terrorMsg = &ex\n\t\t}\n\n\t\tentry := QueryLogEntry{\n\t\t\tID:             uuid.NewString(),\n\t\t\tConnectionID:   connID,\n\t\t\tQueryID:        fmt.Sprintf(\"%v\", row[\"query_id\"]),\n\t\t\tUser:           fmt.Sprintf(\"%v\", row[\"ch_user\"]),\n\t\t\tQueryText:      queryText,\n\t\t\tNormalizedHash: hash,\n\t\t\tQueryKind:      kind,\n\t\t\tEventTime:      eventTime,\n\t\t\tDurationMs:     toInt64(row[\"query_duration_ms\"]),\n\t\t\tReadRows:       toInt64(row[\"read_rows\"]),\n\t\t\tReadBytes:      toInt64(row[\"read_bytes\"]),\n\t\t\tResultRows:     toInt64(row[\"result_rows\"]),\n\t\t\tWrittenRows:    toInt64(row[\"written_rows\"]),\n\t\t\tWrittenBytes:   toInt64(row[\"written_bytes\"]),\n\t\t\tMemoryUsage:    toInt64(row[\"memory_usage\"]),\n\t\t\tTablesUsed:     extractTablesJSON(row[\"tables\"]),\n\t\t\tIsError:        isError,\n\t\t\tErrorMessage:   errorMsg,\n\t\t\tCreatedAt:      now,\n\t\t}\n\n\t\tentries = append(entries, entry)\n\t\tlatestEventTime = eventTime\n\t}\n\n\t// Batch insert into SQLite\n\tinserted, err := s.store.InsertQueryLogBatch(entries)\n\tif err != nil {\n\t\tsyncErr = fmt.Errorf(\"failed to batch insert query log: %w\", err)\n\t\treturn result, syncErr\n\t}\n\tresult.QueriesIngested = inserted\n\n\t// Update watermark to the latest event time\n\tif latestEventTime != \"\" {\n\t\tresult.NewWatermark = latestEventTime\n\t} else {\n\t\tresult.NewWatermark = watermark\n\t}\n\n\t// Extract lineage (table + column level) from each entry\n\tlineageCount := 0\n\tfor _, entry := range entries {\n\t\tresults := ExtractLineageWithColumns(connID, entry)\n\t\tfor _, lr := range 
results {\n\t\t\tif err := s.store.InsertLineageEdge(lr.Edge); err != nil {\n\t\t\t\tslog.Error(\"Failed to insert lineage edge\", \"error\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tlineageCount++\n\t\t\t// Insert column-level mappings\n\t\t\tfor _, cm := range lr.ColumnMappings {\n\t\t\t\tcolEdge := ColumnLineageEdge{\n\t\t\t\t\tID:            uuid.New().String(),\n\t\t\t\t\tLineageEdgeID: lr.Edge.ID,\n\t\t\t\t\tConnectionID:  connID,\n\t\t\t\t\tSourceColumn:  cm.SourceColumn,\n\t\t\t\t\tTargetColumn:  cm.TargetColumn,\n\t\t\t\t}\n\t\t\t\tif err := s.store.InsertColumnLineageEdge(colEdge); err != nil {\n\t\t\t\t\tslog.Error(\"Failed to insert column lineage edge\", \"error\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tresult.LineageEdgesFound = lineageCount\n\n\t// Evaluate policies against each entry\n\tviolationCount := 0\n\tpolicies, err := s.store.GetPolicies(connID)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to load policies for violation check\", \"error\", err)\n\t} else if len(policies) > 0 {\n\t\tfor _, entry := range entries {\n\t\t\tviolations := EvaluatePolicies(connID, entry, policies, s.store)\n\t\t\tfor _, v := range violations {\n\t\t\t\tviolationID, err := s.store.CreateViolation(connID, v.PolicyID, v.QueryLogID, v.User, v.ViolationDetail, v.Severity, \"post_exec\", \"\")\n\t\t\t\tif err != nil {\n\t\t\t\t\tslog.Error(\"Failed to insert policy violation\", \"error\", err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tpolicyName := strings.TrimSpace(v.PolicyName)\n\t\t\t\tif policyName == \"\" {\n\t\t\t\t\tpolicyName = v.PolicyID\n\t\t\t\t}\n\t\t\t\talertSeverity := strings.ToLower(strings.TrimSpace(v.Severity))\n\t\t\t\tif alertSeverity == \"\" {\n\t\t\t\t\talertSeverity = alerts.SeverityWarn\n\t\t\t\t}\n\t\t\t\tfingerprint := fmt.Sprintf(\"policy:%s:user:%s:hash:%s\", v.PolicyID, v.User, entry.NormalizedHash)\n\t\t\t\tif _, err := 
s.db.CreateAlertEvent(\n\t\t\t\t\t&connID,\n\t\t\t\t\talerts.EventTypePolicyViolation,\n\t\t\t\t\talertSeverity,\n\t\t\t\t\tfmt.Sprintf(\"Policy violation: %s\", policyName),\n\t\t\t\t\tv.ViolationDetail,\n\t\t\t\t\tmap[string]interface{}{\n\t\t\t\t\t\t\"violation_id\":       violationID,\n\t\t\t\t\t\t\"policy_id\":          v.PolicyID,\n\t\t\t\t\t\t\"policy_name\":        v.PolicyName,\n\t\t\t\t\t\t\"query_id\":           entry.QueryID,\n\t\t\t\t\t\t\"query_kind\":         entry.QueryKind,\n\t\t\t\t\t\t\"ch_user\":            entry.User,\n\t\t\t\t\t\t\"query_hash\":         entry.NormalizedHash,\n\t\t\t\t\t\t\"event_time\":         entry.EventTime,\n\t\t\t\t\t\t\"violation_severity\": v.Severity,\n\t\t\t\t\t},\n\t\t\t\t\tfingerprint,\n\t\t\t\t\tviolationID,\n\t\t\t\t); err != nil {\n\t\t\t\t\tslog.Warn(\"Failed to create alert event for policy violation\", \"error\", err)\n\t\t\t\t}\n\t\t\t\tif _, created, err := s.store.UpsertIncidentFromViolation(\n\t\t\t\t\tconnID,\n\t\t\t\t\tviolationID,\n\t\t\t\t\tpolicyName,\n\t\t\t\t\tv.User,\n\t\t\t\t\talertSeverity,\n\t\t\t\t\tv.ViolationDetail,\n\t\t\t\t); err != nil {\n\t\t\t\t\tslog.Warn(\"Failed to upsert incident for policy violation\", \"violation\", violationID, \"error\", err)\n\t\t\t\t} else if created {\n\t\t\t\t\tslog.Info(\"Governance incident created from violation\", \"violation\", violationID)\n\t\t\t\t}\n\t\t\t\tviolationCount++\n\t\t\t}\n\t\t}\n\t}\n\tresult.ViolationsFound = violationCount\n\n\tslog.Info(\"Query log sync completed\",\n\t\t\"connection\", connID,\n\t\t\"ingested\", result.QueriesIngested,\n\t\t\"lineage_edges\", result.LineageEdgesFound,\n\t\t\"violations\", result.ViolationsFound,\n\t\t\"new_watermark\", result.NewWatermark,\n\t)\n\n\treturn result, nil\n}\n\n// ── Query helper functions ──────────────────────────────────────────────────\n\n// classifyQuery returns a classification string for the query type.\nfunc classifyQuery(query string) string {\n\ttrimmed := 
strings.TrimSpace(query)\n\tfor strings.HasPrefix(trimmed, \"--\") {\n\t\tif idx := strings.Index(trimmed, \"\\n\"); idx >= 0 {\n\t\t\ttrimmed = strings.TrimSpace(trimmed[idx+1:])\n\t\t} else {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tupper := strings.ToUpper(trimmed)\n\tswitch {\n\tcase strings.HasPrefix(upper, \"SELECT\") || strings.HasPrefix(upper, \"WITH\"):\n\t\treturn \"Select\"\n\tcase strings.HasPrefix(upper, \"INSERT\"):\n\t\treturn \"Insert\"\n\tcase strings.HasPrefix(upper, \"CREATE\"):\n\t\treturn \"Create\"\n\tcase strings.HasPrefix(upper, \"ALTER\"):\n\t\treturn \"Alter\"\n\tcase strings.HasPrefix(upper, \"DROP\"):\n\t\treturn \"Drop\"\n\tdefault:\n\t\treturn \"Other\"\n\t}\n}\n\nvar (\n\tstringLiteralRe = regexp.MustCompile(`'[^']*'`)\n\tnumberLiteralRe = regexp.MustCompile(`\\b\\d+\\.?\\d*\\b`)\n\tmultiSpaceRe    = regexp.MustCompile(`\\s+`)\n)\n\nfunc normalizeQuery(query string) string {\n\tnormalized := stringLiteralRe.ReplaceAllString(query, \"'?'\")\n\tnormalized = numberLiteralRe.ReplaceAllString(normalized, \"?\")\n\tnormalized = multiSpaceRe.ReplaceAllString(normalized, \" \")\n\tnormalized = strings.TrimSpace(normalized)\n\treturn strings.ToUpper(normalized)\n}\n\nfunc hashNormalized(normalized string) string {\n\th := sha256.Sum256([]byte(normalized))\n\treturn fmt.Sprintf(\"%x\", h)[:32]\n}\n\nfunc extractTablesJSON(v interface{}) string {\n\tif v == nil {\n\t\treturn \"[]\"\n\t}\n\n\tswitch val := v.(type) {\n\tcase string:\n\t\tif strings.HasPrefix(val, \"[\") {\n\t\t\treturn val\n\t\t}\n\t\tif val == \"\" {\n\t\t\treturn \"[]\"\n\t\t}\n\t\tb, _ := json.Marshal([]string{val})\n\t\treturn string(b)\n\tcase []interface{}:\n\t\tstrs := make([]string, 0, len(val))\n\t\tfor _, item := range val {\n\t\t\tstrs = append(strs, fmt.Sprintf(\"%v\", item))\n\t\t}\n\t\tb, _ := json.Marshal(strs)\n\t\treturn string(b)\n\tcase []string:\n\t\tb, _ := json.Marshal(val)\n\t\treturn string(b)\n\tdefault:\n\t\tb, err := json.Marshal(v)\n\t\tif err != nil 
{\n\t\t\treturn \"[]\"\n\t\t}\n\t\treturn string(b)\n\t}\n}\n\nfunc sanitizeQueryLogWatermark(v string) string {\n\ts := strings.TrimSpace(v)\n\tif s == \"\" {\n\t\treturn defaultQueryLogWatermark\n\t}\n\n\tlayouts := []string{\n\t\ttime.RFC3339Nano,\n\t\ttime.RFC3339,\n\t\t\"2006-01-02 15:04:05.999999999\",\n\t\t\"2006-01-02 15:04:05.999999\",\n\t\t\"2006-01-02 15:04:05\",\n\t}\n\n\tfor _, layout := range layouts {\n\t\tif t, err := time.Parse(layout, s); err == nil {\n\t\t\treturn t.UTC().Format(\"2006-01-02 15:04:05\")\n\t\t}\n\t}\n\n\t// Last-resort hardening against accidental malformed/corrupt values.\n\ts = strings.ReplaceAll(s, \"'\", \"\")\n\tif s == \"\" {\n\t\treturn defaultQueryLogWatermark\n\t}\n\treturn s\n}\n"
  },
  {
    "path": "internal/governance/incidents.go",
    "content": "package governance\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// ── Object comments ──────────────────────────────────────────────────────────\n\nfunc (s *Store) CreateObjectComment(connectionID, objectType, dbName, tableName, columnName, commentText, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif objectType == \"table\" {\n\t\tcolumnName = \"\"\n\t}\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_object_comments (id, connection_id, object_type, database_name, table_name, column_name, comment_text, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid,\n\t\tconnectionID,\n\t\tstrings.ToLower(strings.TrimSpace(objectType)),\n\t\tstrings.TrimSpace(dbName),\n\t\tstrings.TrimSpace(tableName),\n\t\tstrings.TrimSpace(columnName),\n\t\tstrings.TrimSpace(commentText),\n\t\tnullableValue(createdBy),\n\t\tnow,\n\t\tnow,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create object comment: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc (s *Store) ListObjectComments(connectionID, objectType, dbName, tableName, columnName string, limit int) ([]ObjectComment, error) {\n\tif limit <= 0 {\n\t\tlimit = 100\n\t}\n\tif limit > 1000 {\n\t\tlimit = 1000\n\t}\n\n\twhere := []string{\"connection_id = ?\"}\n\targs := []interface{}{connectionID}\n\tif ot := strings.TrimSpace(strings.ToLower(objectType)); ot != \"\" {\n\t\twhere = append(where, \"object_type = ?\")\n\t\targs = append(args, ot)\n\t}\n\tif db := strings.TrimSpace(dbName); db != \"\" {\n\t\twhere = append(where, \"database_name = ?\")\n\t\targs = append(args, db)\n\t}\n\tif tbl := strings.TrimSpace(tableName); tbl != \"\" {\n\t\twhere = append(where, \"table_name = ?\")\n\t\targs = append(args, tbl)\n\t}\n\tif col := strings.TrimSpace(columnName); col != \"\" {\n\t\twhere = append(where, \"column_name = ?\")\n\t\targs = append(args, 
col)\n\t}\n\targs = append(args, limit)\n\n\tquery := fmt.Sprintf(\n\t\t`SELECT id, connection_id, object_type, database_name, table_name, column_name, comment_text, created_by, created_at, updated_at\n\t\t FROM gov_object_comments\n\t\t WHERE %s\n\t\t ORDER BY created_at DESC\n\t\t LIMIT ?`,\n\t\tstrings.Join(where, \" AND \"),\n\t)\n\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list object comments: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]ObjectComment, 0)\n\tfor rows.Next() {\n\t\tvar c ObjectComment\n\t\tvar createdBy sql.NullString\n\t\tif err := rows.Scan(\n\t\t\t&c.ID, &c.ConnectionID, &c.ObjectType, &c.DatabaseName, &c.TableName, &c.ColumnName,\n\t\t\t&c.CommentText, &createdBy, &c.CreatedAt, &c.UpdatedAt,\n\t\t); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan object comment: %w\", err)\n\t\t}\n\t\tc.CreatedBy = nullStringToPtr(createdBy)\n\t\tout = append(out, c)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate object comments: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (s *Store) DeleteObjectComment(connectionID, id string) error {\n\tres, err := s.conn().Exec(\n\t\t`DELETE FROM gov_object_comments WHERE id = ? 
AND connection_id = ?`,\n\t\tid, connectionID,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete object comment: %w\", err)\n\t}\n\taffected, _ := res.RowsAffected()\n\tif affected == 0 {\n\t\treturn sql.ErrNoRows\n\t}\n\treturn nil\n}\n\n// ── Incidents ────────────────────────────────────────────────────────────────\n\nfunc (s *Store) ListIncidents(connectionID, status, severity string, limit int) ([]Incident, error) {\n\tif limit <= 0 {\n\t\tlimit = 100\n\t}\n\tif limit > 1000 {\n\t\tlimit = 1000\n\t}\n\twhere := []string{\"connection_id = ?\"}\n\targs := []interface{}{connectionID}\n\tif v := strings.TrimSpace(strings.ToLower(status)); v != \"\" {\n\t\twhere = append(where, \"status = ?\")\n\t\targs = append(args, v)\n\t}\n\tif v := strings.TrimSpace(strings.ToLower(severity)); v != \"\" {\n\t\twhere = append(where, \"severity = ?\")\n\t\targs = append(args, v)\n\t}\n\targs = append(args, limit)\n\n\tquery := fmt.Sprintf(\n\t\t`SELECT id, connection_id, source_type, source_ref, dedupe_key, title, severity, status, assignee, details, resolution_note,\n\t\t        occurrence_count, first_seen_at, last_seen_at, resolved_at, created_by, created_at, updated_at\n\t\t FROM gov_incidents\n\t\t WHERE %s\n\t\t ORDER BY last_seen_at DESC\n\t\t LIMIT ?`,\n\t\tstrings.Join(where, \" AND \"),\n\t)\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list incidents: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := make([]Incident, 0)\n\tfor rows.Next() {\n\t\titem, err := scanIncident(rows)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate incidents: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (s *Store) GetIncidentByID(id string) (*Incident, error) {\n\trow := s.conn().QueryRow(\n\t\t`SELECT id, connection_id, source_type, source_ref, dedupe_key, title, severity, status, assignee, details, resolution_note,\n\t\t    
    occurrence_count, first_seen_at, last_seen_at, resolved_at, created_by, created_at, updated_at\n\t\t FROM gov_incidents\n\t\t WHERE id = ?`,\n\t\tid,\n\t)\n\titem, err := scanIncident(row)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &item, nil\n}\n\nfunc (s *Store) CreateIncident(connectionID, sourceType, sourceRef, dedupeKey, title, severity, status, assignee, details, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_incidents\n\t\t (id, connection_id, source_type, source_ref, dedupe_key, title, severity, status, assignee, details, occurrence_count, first_seen_at, last_seen_at, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?)`,\n\t\tid,\n\t\tconnectionID,\n\t\tnullableValue(strings.ToLower(strings.TrimSpace(sourceType))),\n\t\tnullableValue(strings.TrimSpace(sourceRef)),\n\t\tnullableValue(strings.TrimSpace(dedupeKey)),\n\t\tstrings.TrimSpace(title),\n\t\tstrings.ToLower(strings.TrimSpace(severity)),\n\t\tstrings.ToLower(strings.TrimSpace(status)),\n\t\tnullableValue(strings.TrimSpace(assignee)),\n\t\tnullableValue(strings.TrimSpace(details)),\n\t\tnow,\n\t\tnow,\n\t\tnullableValue(strings.TrimSpace(createdBy)),\n\t\tnow,\n\t\tnow,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create incident: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc (s *Store) UpdateIncident(id, title, severity, status, assignee, details, resolutionNote string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tvar resolvedAt interface{}\n\tif status == \"resolved\" || status == \"dismissed\" {\n\t\tresolvedAt = now\n\t}\n\tif _, err := s.conn().Exec(\n\t\t`UPDATE gov_incidents\n\t\t SET title = ?, severity = ?, status = ?, assignee = ?, details = ?, resolution_note = ?, resolved_at = ?, updated_at = ?\n\t\t WHERE id = 
?`,\n\t\tstrings.TrimSpace(title),\n\t\tstrings.ToLower(strings.TrimSpace(severity)),\n\t\tstrings.ToLower(strings.TrimSpace(status)),\n\t\tnullableValue(strings.TrimSpace(assignee)),\n\t\tnullableValue(strings.TrimSpace(details)),\n\t\tnullableValue(strings.TrimSpace(resolutionNote)),\n\t\tresolvedAt,\n\t\tnow,\n\t\tid,\n\t); err != nil {\n\t\treturn fmt.Errorf(\"update incident: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (s *Store) UpsertIncidentFromViolation(connectionID, sourceRef, policyName, user, severity, detail string) (string, bool, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tdedupeKey := strings.ToLower(strings.TrimSpace(fmt.Sprintf(\"violation:%s:%s:%s\", policyName, user, severity)))\n\trow := s.conn().QueryRow(\n\t\t`SELECT id\n\t\t FROM gov_incidents\n\t\t WHERE connection_id = ?\n\t\t   AND dedupe_key = ?\n\t\t   AND status IN ('open', 'triaged', 'in_progress')\n\t\t ORDER BY last_seen_at DESC\n\t\t LIMIT 1`,\n\t\tconnectionID, dedupeKey,\n\t)\n\tvar incidentID string\n\tswitch err := row.Scan(&incidentID); err {\n\tcase nil:\n\t\tif _, err := s.conn().Exec(\n\t\t\t`UPDATE gov_incidents\n\t\t\t SET occurrence_count = occurrence_count + 1,\n\t\t\t     last_seen_at = ?,\n\t\t\t     details = COALESCE(?, details),\n\t\t\t     updated_at = ?\n\t\t\t WHERE id = ?`,\n\t\t\tnow,\n\t\t\tnullableValue(strings.TrimSpace(detail)),\n\t\t\tnow,\n\t\t\tincidentID,\n\t\t); err != nil {\n\t\t\treturn \"\", false, fmt.Errorf(\"update existing incident from violation: %w\", err)\n\t\t}\n\t\treturn incidentID, false, nil\n\tcase sql.ErrNoRows:\n\t\ttitle := fmt.Sprintf(\"Policy violation: %s (%s)\", strings.TrimSpace(policyName), strings.TrimSpace(user))\n\t\tif strings.TrimSpace(policyName) == \"\" {\n\t\t\ttitle = fmt.Sprintf(\"Policy violation (%s)\", strings.TrimSpace(user))\n\t\t}\n\t\tid, err := 
s.CreateIncident(\n\t\t\tconnectionID,\n\t\t\t\"violation\",\n\t\t\tsourceRef,\n\t\t\tdedupeKey,\n\t\t\ttitle,\n\t\t\tstrings.ToLower(strings.TrimSpace(severity)),\n\t\t\t\"open\",\n\t\t\t\"\",\n\t\t\tdetail,\n\t\t\t\"system\",\n\t\t)\n\t\tif err != nil {\n\t\t\treturn \"\", false, err\n\t\t}\n\t\treturn id, true, nil\n\tdefault:\n\t\treturn \"\", false, fmt.Errorf(\"find existing incident from violation: %w\", err)\n\t}\n}\n\nfunc (s *Store) GetViolationByID(id string) (*PolicyViolation, error) {\n\trow := s.conn().QueryRow(\n\t\t`SELECT v.id, v.connection_id, v.policy_id, v.query_log_id, v.ch_user, v.violation_detail, v.severity, v.detection_phase, v.request_endpoint, v.detected_at, v.created_at, COALESCE(p.name, '')\n\t\t FROM gov_policy_violations v\n\t\t LEFT JOIN gov_policies p ON p.id = v.policy_id\n\t\t WHERE v.id = ?`,\n\t\tid,\n\t)\n\tvar v PolicyViolation\n\tvar queryLogID, requestEndpoint sql.NullString\n\terr := row.Scan(&v.ID, &v.ConnectionID, &v.PolicyID, &queryLogID, &v.User, &v.ViolationDetail, &v.Severity, &v.DetectionPhase, &requestEndpoint, &v.DetectedAt, &v.CreatedAt, &v.PolicyName)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get violation by id: %w\", err)\n\t}\n\tv.QueryLogID = queryLogID.String\n\tv.RequestEndpoint = nullStringToPtr(requestEndpoint)\n\tv.DetectionPhase = normalizeDetectionPhase(v.DetectionPhase)\n\treturn &v, nil\n}\n\nfunc (s *Store) ListIncidentComments(incidentID string, limit int) ([]IncidentComment, error) {\n\tif limit <= 0 {\n\t\tlimit = 200\n\t}\n\tif limit > 2000 {\n\t\tlimit = 2000\n\t}\n\trows, err := s.conn().Query(\n\t\t`SELECT id, incident_id, comment_text, created_by, created_at\n\t\t FROM gov_incident_comments\n\t\t WHERE incident_id = ?\n\t\t ORDER BY created_at ASC\n\t\t LIMIT ?`,\n\t\tincidentID, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"list incident comments: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tout := 
make([]IncidentComment, 0)\n\tfor rows.Next() {\n\t\tvar item IncidentComment\n\t\tvar createdBy sql.NullString\n\t\tif err := rows.Scan(&item.ID, &item.IncidentID, &item.CommentText, &createdBy, &item.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan incident comment: %w\", err)\n\t\t}\n\t\titem.CreatedBy = nullStringToPtr(createdBy)\n\t\tout = append(out, item)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate incident comments: %w\", err)\n\t}\n\treturn out, nil\n}\n\nfunc (s *Store) CreateIncidentComment(incidentID, commentText, createdBy string) (string, error) {\n\tid := uuid.NewString()\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tif _, err := s.conn().Exec(\n\t\t`INSERT INTO gov_incident_comments (id, incident_id, comment_text, created_by, created_at)\n\t\t VALUES (?, ?, ?, ?, ?)`,\n\t\tid,\n\t\tincidentID,\n\t\tstrings.TrimSpace(commentText),\n\t\tnullableValue(strings.TrimSpace(createdBy)),\n\t\tnow,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"create incident comment: %w\", err)\n\t}\n\tif _, err := s.conn().Exec(\n\t\t`UPDATE gov_incidents\n\t\t SET updated_at = ?\n\t\t WHERE id = ?`,\n\t\tnow, incidentID,\n\t); err != nil {\n\t\treturn \"\", fmt.Errorf(\"touch incident after comment: %w\", err)\n\t}\n\treturn id, nil\n}\n\nfunc scanIncident(scanner interface {\n\tScan(dest ...interface{}) error\n}) (Incident, error) {\n\tvar item Incident\n\tvar sourceRef, dedupeKey, assignee, details, resolutionNote, resolvedAt, createdBy sql.NullString\n\terr := scanner.Scan(\n\t\t&item.ID,\n\t\t&item.ConnectionID,\n\t\t&item.SourceType,\n\t\t&sourceRef,\n\t\t&dedupeKey,\n\t\t&item.Title,\n\t\t&item.Severity,\n\t\t&item.Status,\n\t\t&assignee,\n\t\t&details,\n\t\t&resolutionNote,\n\t\t&item.OccurrenceCount,\n\t\t&item.FirstSeenAt,\n\t\t&item.LastSeenAt,\n\t\t&resolvedAt,\n\t\t&createdBy,\n\t\t&item.CreatedAt,\n\t\t&item.UpdatedAt,\n\t)\n\tif err != nil {\n\t\treturn item, err\n\t}\n\titem.SourceRef = 
nullStringToPtr(sourceRef)\n\titem.DedupeKey = nullStringToPtr(dedupeKey)\n\titem.Assignee = nullStringToPtr(assignee)\n\titem.Details = nullStringToPtr(details)\n\titem.ResolutionNote = nullStringToPtr(resolutionNote)\n\titem.ResolvedAt = nullStringToPtr(resolvedAt)\n\titem.CreatedBy = nullStringToPtr(createdBy)\n\treturn item, nil\n}\n\nfunc nullableValue(v string) interface{} {\n\ttrimmed := strings.TrimSpace(v)\n\tif trimmed == \"\" {\n\t\treturn nil\n\t}\n\treturn trimmed\n}\n"
  },
  {
    "path": "internal/governance/lineage.go",
    "content": "package governance\n\nimport (\n\t\"regexp\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// ── Regex patterns for table references ─────────────────────────────────────\n\n// tableRef matches a possibly qualified table name: [db.]table\n// Handles both backtick-quoted and unquoted identifiers.\nconst tableRefPattern = `((?:` + \"`\" + `[^` + \"`\" + `]+` + \"`\" + `|[a-zA-Z_][a-zA-Z0-9_]*)(?:\\.(?:` + \"`\" + `[^` + \"`\" + `]+` + \"`\" + `|[a-zA-Z_][a-zA-Z0-9_]*))?)`\n\nvar (\n\tfromRe   = regexp.MustCompile(`(?i)\\bFROM\\s+` + tableRefPattern)\n\tjoinRe   = regexp.MustCompile(`(?i)\\bJOIN\\s+` + tableRefPattern)\n\tinsertRe = regexp.MustCompile(`(?i)\\bINSERT\\s+INTO\\s+` + tableRefPattern)\n\tcreateRe = regexp.MustCompile(`(?i)\\bCREATE\\s+(?:TABLE|MATERIALIZED\\s+VIEW)\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?` + tableRefPattern)\n)\n\n// tableRefParsed holds a parsed database.table reference.\ntype tableRefParsed struct {\n\tDatabase string\n\tTable    string\n}\n\n// ── Public API ──────────────────────────────────────────────────────────────\n\n// ExtractLineage analyses a query log entry and returns any lineage edges\n// that can be inferred from the SQL text.  Only INSERT INTO ... SELECT and\n// CREATE TABLE/MATERIALIZED VIEW ... 
AS SELECT produce edges; plain SELECTs\n// are reads and do not generate edges.\nfunc ExtractLineage(connectionID string, entry QueryLogEntry) []LineageEdge {\n\tquery := normaliseWhitespace(entry.QueryText)\n\n\t// Determine target table (INSERT INTO / CREATE TABLE|MV).\n\ttarget := extractTarget(query)\n\tif target == nil {\n\t\t// Plain SELECT or DDL without a target — no lineage edges.\n\t\treturn nil\n\t}\n\n\t// Determine edge type.\n\tedgeType := classifyEdgeType(query)\n\n\t// Collect source tables (FROM / JOIN), excluding the target itself and\n\t// system tables.\n\tsources := extractSourceTables(query)\n\tif len(sources) == 0 {\n\t\treturn nil\n\t}\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar edges []LineageEdge\n\tfor _, src := range sources {\n\t\t// Skip self-references and system tables.\n\t\tif src.Database == target.Database && src.Table == target.Table {\n\t\t\tcontinue\n\t\t}\n\t\tif isSystemTable(src.Database, src.Table) {\n\t\t\tcontinue\n\t\t}\n\n\t\tedges = append(edges, LineageEdge{\n\t\t\tID:             uuid.New().String(),\n\t\t\tConnectionID:   connectionID,\n\t\t\tSourceDatabase: src.Database,\n\t\t\tSourceTable:    src.Table,\n\t\t\tTargetDatabase: target.Database,\n\t\t\tTargetTable:    target.Table,\n\t\t\tQueryID:        entry.QueryID,\n\t\t\tUser:           entry.User,\n\t\t\tEdgeType:       string(edgeType),\n\t\t\tDetectedAt:     now,\n\t\t})\n\t}\n\n\treturn edges\n}\n\n// ── Column-level lineage ───────────────────────────────────────────────────\n\n// ColumnMapping represents a source→target column mapping within a lineage edge.\ntype ColumnMapping struct {\n\tSourceColumn string\n\tTargetColumn string\n}\n\n// LineageResult bundles a table-level edge with its column mappings.\ntype LineageResult struct {\n\tEdge           LineageEdge\n\tColumnMappings []ColumnMapping\n}\n\nvar (\n\t// Matches INSERT INTO table (col1, col2, ...) 
— captures the parenthesized column list.\n\tinsertColsRe = regexp.MustCompile(`(?i)\\bINSERT\\s+INTO\\s+` + tableRefPattern + `\\s*\\(([^)]+)\\)`)\n\t// Matches SELECT ... FROM — captures everything between SELECT and FROM.\n\tselectClauseRe = regexp.MustCompile(`(?i)\\bSELECT\\s+(.*?)\\s+FROM\\b`)\n)\n\n// ExtractColumnLineage attempts to extract column-level mappings from an\n// INSERT INTO (cols) SELECT cols FROM ... pattern. Returns nil when the\n// pattern doesn't match or column counts differ (graceful degradation).\nfunc ExtractColumnLineage(query string) []ColumnMapping {\n\tnormalized := normaliseWhitespace(query)\n\n\t// Extract target columns from INSERT INTO table (col1, col2, ...)\n\tinsertMatch := insertColsRe.FindStringSubmatch(normalized)\n\tif insertMatch == nil {\n\t\treturn nil\n\t}\n\t// The column list is in the last capture group.\n\ttargetColsRaw := insertMatch[len(insertMatch)-1]\n\ttargetCols := splitAndTrimColumns(targetColsRaw)\n\tif len(targetCols) == 0 {\n\t\treturn nil\n\t}\n\n\t// Extract source columns from SELECT clause.\n\tselectMatch := selectClauseRe.FindStringSubmatch(normalized)\n\tif selectMatch == nil || len(selectMatch) < 2 {\n\t\treturn nil\n\t}\n\tsourceExprs := splitSelectExpressions(selectMatch[1])\n\tif len(sourceExprs) == 0 {\n\t\treturn nil\n\t}\n\n\t// Only zip when counts match — avoids incorrect mappings.\n\tif len(targetCols) != len(sourceExprs) {\n\t\treturn nil\n\t}\n\n\tmappings := make([]ColumnMapping, 0, len(targetCols))\n\tfor i, target := range targetCols {\n\t\tsource := extractColumnName(sourceExprs[i])\n\t\tif source == \"\" || source == \"*\" {\n\t\t\treturn nil // SELECT * or unparseable expression\n\t\t}\n\t\tmappings = append(mappings, ColumnMapping{\n\t\t\tSourceColumn: source,\n\t\t\tTargetColumn: target,\n\t\t})\n\t}\n\treturn mappings\n}\n\n// ExtractLineageWithColumns is like ExtractLineage but also returns column mappings.\nfunc ExtractLineageWithColumns(connectionID string, entry 
QueryLogEntry) []LineageResult {\n\tedges := ExtractLineage(connectionID, entry)\n\tif len(edges) == 0 {\n\t\treturn nil\n\t}\n\n\tcolumnMappings := ExtractColumnLineage(entry.QueryText)\n\n\tresults := make([]LineageResult, 0, len(edges))\n\tfor _, edge := range edges {\n\t\tresults = append(results, LineageResult{\n\t\t\tEdge:           edge,\n\t\t\tColumnMappings: columnMappings, // Same mappings apply to all edges from this query\n\t\t})\n\t}\n\treturn results\n}\n\n// splitAndTrimColumns splits a comma-separated column list and trims whitespace/backticks.\nfunc splitAndTrimColumns(s string) []string {\n\tparts := strings.Split(s, \",\")\n\tvar result []string\n\tfor _, p := range parts {\n\t\tcol := strings.TrimSpace(p)\n\t\tcol = stripBackticks(col)\n\t\tif col != \"\" {\n\t\t\tresult = append(result, col)\n\t\t}\n\t}\n\treturn result\n}\n\n// splitSelectExpressions splits SELECT expressions by commas, respecting\n// parenthesized sub-expressions (e.g., function calls).\nfunc splitSelectExpressions(s string) []string {\n\tvar result []string\n\tdepth := 0\n\tstart := 0\n\tfor i := 0; i < len(s); i++ {\n\t\tswitch s[i] {\n\t\tcase '(':\n\t\t\tdepth++\n\t\tcase ')':\n\t\t\tdepth--\n\t\tcase ',':\n\t\t\tif depth == 0 {\n\t\t\t\texpr := strings.TrimSpace(s[start:i])\n\t\t\t\tif expr != \"\" {\n\t\t\t\t\tresult = append(result, expr)\n\t\t\t\t}\n\t\t\t\tstart = i + 1\n\t\t\t}\n\t\t}\n\t}\n\t// Last expression\n\texpr := strings.TrimSpace(s[start:])\n\tif expr != \"\" {\n\t\tresult = append(result, expr)\n\t}\n\treturn result\n}\n\n// extractColumnName extracts the effective column name from a SELECT expression.\n// Handles: \"col\", \"t.col\", \"expr AS alias\", \"expr alias\".\nfunc extractColumnName(expr string) string {\n\texpr = strings.TrimSpace(expr)\n\tif expr == \"\" {\n\t\treturn \"\"\n\t}\n\n\t// Check for AS alias (case-insensitive).\n\tasRe := regexp.MustCompile(`(?i)\\bAS\\s+` + \"(`[^`]+`|[a-zA-Z_][a-zA-Z0-9_]*)\" + `\\s*$`)\n\tif m := 
asRe.FindStringSubmatch(expr); len(m) > 1 {\n\t\treturn stripBackticks(m[1])\n\t}\n\n\t// If no parens and no operators, take the last dotted part.\n\tif !strings.ContainsAny(expr, \"()+*/-\") {\n\t\tparts := strings.Fields(expr)\n\t\tlast := parts[len(parts)-1]\n\t\tdotParts := strings.Split(last, \".\")\n\t\treturn stripBackticks(dotParts[len(dotParts)-1])\n\t}\n\n\treturn \"\"\n}\n\n// ── Internal helpers ────────────────────────────────────────────────────────\n\n// extractTarget returns the target table for INSERT INTO or CREATE TABLE/MV\n// statements.  Returns nil when none is found.\nfunc extractTarget(query string) *tableRefParsed {\n\t// Try INSERT INTO first.\n\tif m := insertRe.FindStringSubmatch(query); len(m) > 1 {\n\t\tdb, tbl := parseTableRef(m[1:])\n\t\treturn &tableRefParsed{Database: db, Table: tbl}\n\t}\n\t// Try CREATE TABLE / MATERIALIZED VIEW.\n\tif m := createRe.FindStringSubmatch(query); len(m) > 1 {\n\t\tdb, tbl := parseTableRef(m[1:])\n\t\treturn &tableRefParsed{Database: db, Table: tbl}\n\t}\n\treturn nil\n}\n\n// classifyEdgeType returns the edge type string based on the SQL verb.\nfunc classifyEdgeType(query string) EdgeType {\n\tupper := strings.ToUpper(query)\n\tif strings.Contains(upper, \"INSERT\") {\n\t\treturn EdgeInsertSelect\n\t}\n\tif strings.Contains(upper, \"CREATE\") {\n\t\treturn EdgeCreateAsSelect\n\t}\n\treturn EdgeSelectFrom\n}\n\n// extractSourceTables finds all FROM and JOIN table references in the query.\nfunc extractSourceTables(query string) []tableRefParsed {\n\tseen := map[string]bool{}\n\tvar results []tableRefParsed\n\n\taddMatches := func(re *regexp.Regexp) {\n\t\tfor _, m := range re.FindAllStringSubmatch(query, -1) {\n\t\t\tif len(m) < 2 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdb, tbl := parseTableRef(m[1:])\n\t\t\tif isSystemTable(db, tbl) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tkey := db + \".\" + tbl\n\t\t\tif seen[key] {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tseen[key] = true\n\t\t\tresults = append(results, 
tableRefParsed{Database: db, Table: tbl})\n\t\t}\n\t}\n\n\taddMatches(fromRe)\n\taddMatches(joinRe)\n\n\treturn results\n}\n\n// parseTableRef takes the captured groups from a table-reference regex match\n// and splits them into (database, table).  If no database qualifier is\n// present, database is returned as an empty string.\nfunc parseTableRef(groups []string) (database, table string) {\n\tif len(groups) == 0 {\n\t\treturn \"\", \"\"\n\t}\n\traw := groups[0]\n\traw = stripBackticks(raw)\n\n\tparts := strings.SplitN(raw, \".\", 2)\n\tif len(parts) == 2 {\n\t\treturn stripBackticks(parts[0]), stripBackticks(parts[1])\n\t}\n\treturn \"\", stripBackticks(parts[0])\n}\n\n// stripBackticks removes surrounding backticks from an identifier.\nfunc stripBackticks(s string) string {\n\tif len(s) >= 2 && s[0] == '`' && s[len(s)-1] == '`' {\n\t\treturn s[1 : len(s)-1]\n\t}\n\treturn s\n}\n\n// isSystemTable returns true for ClickHouse system and information_schema\n// databases that should be excluded from lineage graphs.\nfunc isSystemTable(db, table string) bool {\n\tlower := strings.ToLower(db)\n\tswitch lower {\n\tcase \"system\", \"information_schema\", \"information_schema_upper\", \"INFORMATION_SCHEMA\":\n\t\treturn true\n\t}\n\t// Also filter tables that look like system tables when no db is specified.\n\tif db == \"\" {\n\t\tlowerT := strings.ToLower(table)\n\t\tif strings.HasPrefix(lowerT, \"system.\") || strings.HasPrefix(lowerT, \"information_schema.\") {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\n// normaliseWhitespace collapses runs of whitespace into single spaces and\n// trims the result.  This simplifies regex matching.\nfunc normaliseWhitespace(s string) string {\n\treturn strings.Join(strings.Fields(s), \" \")\n}\n"
  },
  {
    "path": "internal/governance/policy_engine.go",
    "content": "package governance\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// ── Store interface for policy evaluation ───────────────────────────────────\n// Store is expected to be defined elsewhere in the package (e.g., store.go).\n// For now we declare a minimal interface so the policy engine compiles\n// independently.  Replace with the concrete type once the store is wired up.\n\n// PolicyStore is the interface the policy engine needs from the governance\n// data store.\ntype PolicyStore interface {\n\tGetAccessMatrixForUser(connectionID, userName string) ([]AccessMatrixEntry, error)\n}\n\n// ── Public API ──────────────────────────────────────────────────────────────\n\n// EvaluatePolicies checks a query log entry against all provided policies and\n// returns any violations.  It uses the store to look up the user's roles in\n// the access matrix.\nfunc EvaluatePolicies(connectionID string, entry QueryLogEntry, policies []Policy, store PolicyStore) []PolicyViolation {\n\t// Parse the tables_used JSON field.\n\ttablesUsed := parseTablesUsed(entry.TablesUsed)\n\n\t// Retrieve the user's roles / privileges from the access matrix.\n\tmatrixEntries, err := store.GetAccessMatrixForUser(connectionID, entry.User)\n\tif err != nil {\n\t\t// If we can't resolve roles we can't evaluate — return empty.\n\t\treturn nil\n\t}\n\n\tuserRoles := collectUserRoles(matrixEntries)\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar violations []PolicyViolation\n\tfor _, policy := range policies {\n\t\tif !policy.Enabled {\n\t\t\tcontinue\n\t\t}\n\t\tif normalizePolicyEnforcementMode(policy.EnforcementMode) == \"block\" {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Check whether the query touches the object protected by this policy.\n\t\tif !queryTouchesObject(tablesUsed, entry.QueryText, policy) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Check whether the user holds the required role.\n\t\tif hasRole(userRoles, 
policy.RequiredRole) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// No required role found → create a violation.\n\t\tdetail := fmt.Sprintf(\n\t\t\t\"User %q executed a query touching %s without required role %q\",\n\t\t\tentry.User,\n\t\t\tdescribePolicyObject(policy),\n\t\t\tpolicy.RequiredRole,\n\t\t)\n\n\t\tviolations = append(violations, PolicyViolation{\n\t\t\tID:              uuid.New().String(),\n\t\t\tConnectionID:    connectionID,\n\t\t\tPolicyID:        policy.ID,\n\t\t\tQueryLogID:      entry.ID,\n\t\t\tUser:            entry.User,\n\t\t\tViolationDetail: detail,\n\t\t\tSeverity:        policy.Severity,\n\t\t\tDetectedAt:      now,\n\t\t\tCreatedAt:       now,\n\t\t\tPolicyName:      policy.Name,\n\t\t})\n\t}\n\n\treturn violations\n}\n\n// ── Internal helpers ────────────────────────────────────────────────────────\n\n// parseTablesUsed deserialises the JSON array stored in QueryLogEntry.TablesUsed.\n// It returns an empty slice on error or empty input.\nfunc parseTablesUsed(raw string) []string {\n\tif raw == \"\" || raw == \"[]\" {\n\t\treturn nil\n\t}\n\tvar tables []string\n\tif err := json.Unmarshal([]byte(raw), &tables); err != nil {\n\t\treturn nil\n\t}\n\treturn tables\n}\n\n// collectUserRoles extracts the set of distinct role names from access matrix\n// entries.  
Both direct grants (role_name is set) and privilege names are\n// collected so we can match on either.\nfunc collectUserRoles(entries []AccessMatrixEntry) map[string]bool {\n\troles := make(map[string]bool, len(entries))\n\tfor _, e := range entries {\n\t\tif e.RoleName != nil && *e.RoleName != \"\" {\n\t\t\troles[strings.ToLower(*e.RoleName)] = true\n\t\t}\n\t}\n\treturn roles\n}\n\n// hasRole checks whether the user's role set contains the required role\n// (case-insensitive comparison).\nfunc hasRole(userRoles map[string]bool, requiredRole string) bool {\n\treturn userRoles[strings.ToLower(requiredRole)]\n}\n\n// queryTouchesObject determines whether a query (identified by its list of\n// tables used and the raw SQL text) accesses the object described by a policy.\nfunc queryTouchesObject(tablesUsed []string, queryText string, policy Policy) bool {\n\tswitch strings.ToLower(policy.ObjectType) {\n\tcase \"database\":\n\t\treturn touchesDatabase(tablesUsed, deref(policy.ObjectDatabase))\n\n\tcase \"table\":\n\t\treturn touchesTable(tablesUsed, deref(policy.ObjectDatabase), deref(policy.ObjectTable))\n\n\tcase \"column\":\n\t\tif !touchesTable(tablesUsed, deref(policy.ObjectDatabase), deref(policy.ObjectTable)) {\n\t\t\treturn false\n\t\t}\n\t\t// For column-level policies, check if the column name appears in the\n\t\t// query text.  This is a heuristic — a full parser would be needed\n\t\t// for perfect accuracy.\n\t\tcol := deref(policy.ObjectColumn)\n\t\tif col == \"\" {\n\t\t\treturn false\n\t\t}\n\t\treturn columnMentioned(queryText, col)\n\n\tdefault:\n\t\treturn false\n\t}\n}\n\n// touchesDatabase returns true if any entry in tablesUsed belongs to the\n// given database.  
tablesUsed entries are expected in \"db.table\" format.\nfunc touchesDatabase(tablesUsed []string, database string) bool {\n\tif database == \"\" {\n\t\treturn false\n\t}\n\tlowerDB := strings.ToLower(database)\n\tfor _, t := range tablesUsed {\n\t\tparts := strings.SplitN(t, \".\", 2)\n\t\tif len(parts) == 2 && strings.ToLower(parts[0]) == lowerDB {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\n// touchesTable returns true if the specific db.table combination appears in\n// tablesUsed.\nfunc touchesTable(tablesUsed []string, database, table string) bool {\n\tif table == \"\" {\n\t\treturn false\n\t}\n\tlowerDB := strings.ToLower(database)\n\tlowerTbl := strings.ToLower(table)\n\n\tfor _, t := range tablesUsed {\n\t\tparts := strings.SplitN(t, \".\", 2)\n\t\tswitch {\n\t\tcase len(parts) == 2:\n\t\t\tif strings.ToLower(parts[0]) == lowerDB && strings.ToLower(parts[1]) == lowerTbl {\n\t\t\t\treturn true\n\t\t\t}\n\t\tcase len(parts) == 1:\n\t\t\t// No database qualifier in tablesUsed — match on table name alone\n\t\t\t// only when the policy also has no database.\n\t\t\tif lowerDB == \"\" && strings.ToLower(parts[0]) == lowerTbl {\n\t\t\t\treturn true\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}\n\n// columnMentioned does a case-insensitive check for the column identifier in\n// the query text.  
It looks for the column name as a whole word (surrounded\n// by non-identifier characters or string boundaries).\nfunc columnMentioned(queryText, column string) bool {\n\tlower := strings.ToLower(queryText)\n\tcol := strings.ToLower(column)\n\tidx := 0\n\tfor {\n\t\tpos := strings.Index(lower[idx:], col)\n\t\tif pos < 0 {\n\t\t\treturn false\n\t\t}\n\t\tpos += idx\n\t\t// Check word boundaries.\n\t\tstartOK := pos == 0 || !isIdentChar(lower[pos-1])\n\t\tendPos := pos + len(col)\n\t\tendOK := endPos >= len(lower) || !isIdentChar(lower[endPos])\n\t\tif startOK && endOK {\n\t\t\treturn true\n\t\t}\n\t\tidx = pos + 1\n\t}\n}\n\n// isIdentChar returns true for characters that can appear in a SQL identifier.\nfunc isIdentChar(c byte) bool {\n\treturn (c >= 'a' && c <= 'z') ||\n\t\t(c >= 'A' && c <= 'Z') ||\n\t\t(c >= '0' && c <= '9') ||\n\t\tc == '_'\n}\n\n// describePolicyObject returns a human-readable description of the object a\n// policy protects, for use in violation messages.\nfunc describePolicyObject(p Policy) string {\n\tswitch strings.ToLower(p.ObjectType) {\n\tcase \"database\":\n\t\treturn fmt.Sprintf(\"database %q\", deref(p.ObjectDatabase))\n\tcase \"table\":\n\t\tdb := deref(p.ObjectDatabase)\n\t\ttbl := deref(p.ObjectTable)\n\t\tif db != \"\" {\n\t\t\treturn fmt.Sprintf(\"table %q.%q\", db, tbl)\n\t\t}\n\t\treturn fmt.Sprintf(\"table %q\", tbl)\n\tcase \"column\":\n\t\tdb := deref(p.ObjectDatabase)\n\t\ttbl := deref(p.ObjectTable)\n\t\tcol := deref(p.ObjectColumn)\n\t\tif db != \"\" {\n\t\t\treturn fmt.Sprintf(\"column %q.%q.%q\", db, tbl, col)\n\t\t}\n\t\treturn fmt.Sprintf(\"column %q.%q\", tbl, col)\n\tdefault:\n\t\treturn p.ObjectType\n\t}\n}\n\n// deref safely dereferences a string pointer, returning the empty string for nil.\nfunc deref(s *string) string {\n\tif s == nil {\n\t\treturn \"\"\n\t}\n\treturn *s\n}\n"
  },
  {
    "path": "internal/governance/store.go",
    "content": "package governance\n\nimport (\n\t\"database/sql\"\n\t\"fmt\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/google/uuid\"\n)\n\n// nullStringToPtr converts a sql.NullString to a *string (nil if not valid).\nfunc nullStringToPtr(ns sql.NullString) *string {\n\tif ns.Valid {\n\t\treturn &ns.String\n\t}\n\treturn nil\n}\n\n// nullIntToPtr converts a sql.NullInt64 to an *int (nil if not valid).\nfunc nullIntToPtr(ni sql.NullInt64) *int {\n\tif ni.Valid {\n\t\tv := int(ni.Int64)\n\t\treturn &v\n\t}\n\treturn nil\n}\n\n// ptrToNullString converts a *string to a sql.NullString.\nfunc ptrToNullString(s *string) sql.NullString {\n\tif s == nil {\n\t\treturn sql.NullString{}\n\t}\n\treturn sql.NullString{String: *s, Valid: true}\n}\n\n// Store provides all governance CRUD operations against SQLite.\ntype Store struct {\n\tdb *database.DB\n}\n\n// NewStore creates a new governance Store.\nfunc NewStore(db *database.DB) *Store {\n\treturn &Store{db: db}\n}\n\n// Ensure Store satisfies the PolicyStore interface used by the policy engine.\nvar _ PolicyStore = (*Store)(nil)\n\n// conn returns the underlying *sql.DB for running queries.\nfunc (s *Store) conn() *sql.DB {\n\treturn s.db.Conn()\n}\n\n// ── Sync State ───────────────────────────────────────────────────────────────\n\n// GetSyncStates returns all sync states for a connection.\nfunc (s *Store) GetSyncStates(connectionID string) ([]SyncState, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, sync_type, last_synced_at, watermark, status, last_error, row_count, created_at, updated_at\n\t\t FROM gov_sync_state WHERE connection_id = ? 
ORDER BY sync_type`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get sync states: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []SyncState\n\tfor rows.Next() {\n\t\tvar ss SyncState\n\t\tvar lastSynced, watermark, lastError sql.NullString\n\t\tif err := rows.Scan(&ss.ID, &ss.ConnectionID, &ss.SyncType, &lastSynced, &watermark, &ss.Status, &lastError, &ss.RowCount, &ss.CreatedAt, &ss.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan sync state: %w\", err)\n\t\t}\n\t\tss.LastSyncedAt = nullStringToPtr(lastSynced)\n\t\tss.Watermark = nullStringToPtr(watermark)\n\t\tss.LastError = nullStringToPtr(lastError)\n\t\tresults = append(results, ss)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate sync state rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// GetSyncState returns a specific sync state for a connection and sync type.\nfunc (s *Store) GetSyncState(connectionID string, syncType string) (*SyncState, error) {\n\trow := s.conn().QueryRow(\n\t\t`SELECT id, connection_id, sync_type, last_synced_at, watermark, status, last_error, row_count, created_at, updated_at\n\t\t FROM gov_sync_state WHERE connection_id = ? 
AND sync_type = ?`, connectionID, syncType,\n\t)\n\n\tvar ss SyncState\n\tvar lastSynced, watermark, lastError sql.NullString\n\terr := row.Scan(&ss.ID, &ss.ConnectionID, &ss.SyncType, &lastSynced, &watermark, &ss.Status, &lastError, &ss.RowCount, &ss.CreatedAt, &ss.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get sync state: %w\", err)\n\t}\n\tss.LastSyncedAt = nullStringToPtr(lastSynced)\n\tss.Watermark = nullStringToPtr(watermark)\n\tss.LastError = nullStringToPtr(lastError)\n\treturn &ss, nil\n}\n\n// UpsertSyncState inserts or updates a sync state record.\nfunc (s *Store) UpsertSyncState(connectionID string, syncType string, status string, watermark *string, lastError *string, rowCount int) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_sync_state (id, connection_id, sync_type, last_synced_at, watermark, status, last_error, row_count, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, sync_type) DO UPDATE SET\n\t\t   last_synced_at = excluded.last_synced_at,\n\t\t   watermark = COALESCE(excluded.watermark, gov_sync_state.watermark),\n\t\t   status = excluded.status,\n\t\t   last_error = excluded.last_error,\n\t\t   row_count = excluded.row_count,\n\t\t   updated_at = excluded.updated_at`,\n\t\tid, connectionID, syncType, now, ptrToNullString(watermark), status, ptrToNullString(lastError), rowCount, now, now,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert sync state: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpdateSyncWatermark updates only the watermark for a specific sync state.\nfunc (s *Store) UpdateSyncWatermark(connectionID string, syncType string, watermark string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := s.conn().Exec(\n\t\t`UPDATE gov_sync_state SET watermark = ?, updated_at = ? WHERE connection_id = ? 
AND sync_type = ?`,\n\t\twatermark, now, connectionID, syncType,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update sync watermark: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── Databases ────────────────────────────────────────────────────────────────\n\n// GetDatabases returns all databases for a connection.\nfunc (s *Store) GetDatabases(connectionID string) ([]GovDatabase, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, name, engine, first_seen, last_updated, is_deleted\n\t\t FROM gov_databases WHERE connection_id = ? ORDER BY name`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get databases: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []GovDatabase\n\tfor rows.Next() {\n\t\tvar d GovDatabase\n\t\tif err := rows.Scan(&d.ID, &d.ConnectionID, &d.Name, &d.Engine, &d.FirstSeen, &d.LastUpdated, &d.IsDeleted); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan database: %w\", err)\n\t\t}\n\t\tresults = append(results, d)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate database rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// UpsertDatabase inserts or updates a database record from a GovDatabase struct.\nfunc (s *Store) UpsertDatabase(d GovDatabase) error {\n\tisDeleted := 0\n\tif d.IsDeleted {\n\t\tisDeleted = 1\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_databases (id, connection_id, name, engine, first_seen, last_updated, is_deleted)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, name) DO UPDATE SET\n\t\t   engine = excluded.engine,\n\t\t   last_updated = excluded.last_updated,\n\t\t   is_deleted = excluded.is_deleted`,\n\t\td.ID, d.ConnectionID, d.Name, d.Engine, d.FirstSeen, d.LastUpdated, isDeleted,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert database: %w\", err)\n\t}\n\treturn nil\n}\n\n// MarkDatabaseDeleted soft-deletes a database record.\nfunc (s *Store) MarkDatabaseDeleted(connectionID, name string) error {\n\tnow := 
time.Now().UTC().Format(time.RFC3339)\n\t_, err := s.conn().Exec(\n\t\t`UPDATE gov_databases SET is_deleted = 1, last_updated = ? WHERE connection_id = ? AND name = ?`,\n\t\tnow, connectionID, name,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"mark database deleted: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── Tables ───────────────────────────────────────────────────────────────────\n\n// GetTables returns all non-deleted tables for a connection.\nfunc (s *Store) GetTables(connectionID string) ([]GovTable, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, database_name, table_name, engine, table_uuid, total_rows, total_bytes, partition_count, first_seen, last_updated, is_deleted\n\t\t FROM gov_tables WHERE connection_id = ? AND is_deleted = 0 ORDER BY database_name, table_name`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get tables: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanTables(rows)\n}\n\n// GetTablesByDatabase returns all non-deleted tables for a specific database.\nfunc (s *Store) GetTablesByDatabase(connectionID, databaseName string) ([]GovTable, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, database_name, table_name, engine, table_uuid, total_rows, total_bytes, partition_count, first_seen, last_updated, is_deleted\n\t\t FROM gov_tables WHERE connection_id = ? AND database_name = ? 
AND is_deleted = 0 ORDER BY table_name`,\n\t\tconnectionID, databaseName,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get tables by database: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanTables(rows)\n}\n\n// GetTableByName returns a single table by connection, database, and table name.\nfunc (s *Store) GetTableByName(connectionID, dbName, tableName string) (*GovTable, error) {\n\trow := s.conn().QueryRow(\n\t\t`SELECT id, connection_id, database_name, table_name, engine, table_uuid, total_rows, total_bytes, partition_count, first_seen, last_updated, is_deleted\n\t\t FROM gov_tables WHERE connection_id = ? AND database_name = ? AND table_name = ?`,\n\t\tconnectionID, dbName, tableName,\n\t)\n\n\tvar t GovTable\n\terr := row.Scan(&t.ID, &t.ConnectionID, &t.DatabaseName, &t.TableName, &t.Engine, &t.TableUUID, &t.TotalRows, &t.TotalBytes, &t.PartitionCount, &t.FirstSeen, &t.LastUpdated, &t.IsDeleted)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get table by name: %w\", err)\n\t}\n\treturn &t, nil\n}\n\n// UpsertTable inserts or updates a table record from a GovTable struct.\nfunc (s *Store) UpsertTable(t GovTable) error {\n\tisDeleted := 0\n\tif t.IsDeleted {\n\t\tisDeleted = 1\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_tables (id, connection_id, database_name, table_name, engine, table_uuid, total_rows, total_bytes, partition_count, first_seen, last_updated, is_deleted)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, database_name, table_name) DO UPDATE SET\n\t\t   engine = excluded.engine,\n\t\t   table_uuid = excluded.table_uuid,\n\t\t   total_rows = excluded.total_rows,\n\t\t   total_bytes = excluded.total_bytes,\n\t\t   partition_count = excluded.partition_count,\n\t\t   last_updated = excluded.last_updated,\n\t\t   is_deleted = excluded.is_deleted`,\n\t\tt.ID, t.ConnectionID, t.DatabaseName, t.TableName, t.Engine, 
t.TableUUID,\n\t\tt.TotalRows, t.TotalBytes, t.PartitionCount, t.FirstSeen, t.LastUpdated, isDeleted,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert table: %w\", err)\n\t}\n\treturn nil\n}\n\n// MarkTableDeleted soft-deletes a table record.\nfunc (s *Store) MarkTableDeleted(connectionID, dbName, tableName string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := s.conn().Exec(\n\t\t`UPDATE gov_tables SET is_deleted = 1, last_updated = ? WHERE connection_id = ? AND database_name = ? AND table_name = ?`,\n\t\tnow, connectionID, dbName, tableName,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"mark table deleted: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc scanTables(rows *sql.Rows) ([]GovTable, error) {\n\tvar results []GovTable\n\tfor rows.Next() {\n\t\tvar t GovTable\n\t\tif err := rows.Scan(&t.ID, &t.ConnectionID, &t.DatabaseName, &t.TableName, &t.Engine, &t.TableUUID, &t.TotalRows, &t.TotalBytes, &t.PartitionCount, &t.FirstSeen, &t.LastUpdated, &t.IsDeleted); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan table: %w\", err)\n\t\t}\n\t\tresults = append(results, t)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate table rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Columns ──────────────────────────────────────────────────────────────────\n\n// GetColumns returns columns for a connection, optionally filtered by database and table.\n// If dbName and tableName are empty strings, all non-deleted columns for the connection are returned.\nfunc (s *Store) GetColumns(connectionID, dbName, tableName string) ([]GovColumn, error) {\n\tvar query string\n\tvar args []interface{}\n\n\tif dbName == \"\" && tableName == \"\" {\n\t\tquery = `SELECT id, connection_id, database_name, table_name, column_name, column_type, column_position, default_kind, default_expression, comment, first_seen, last_updated, is_deleted\n\t\t\t FROM gov_columns WHERE connection_id = ? 
AND is_deleted = 0 ORDER BY database_name, table_name, column_position`\n\t\targs = []interface{}{connectionID}\n\t} else {\n\t\tquery = `SELECT id, connection_id, database_name, table_name, column_name, column_type, column_position, default_kind, default_expression, comment, first_seen, last_updated, is_deleted\n\t\t\t FROM gov_columns WHERE connection_id = ? AND database_name = ? AND table_name = ? AND is_deleted = 0 ORDER BY column_position`\n\t\targs = []interface{}{connectionID, dbName, tableName}\n\t}\n\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get columns: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []GovColumn\n\tfor rows.Next() {\n\t\tvar c GovColumn\n\t\tvar defaultKind, defaultExpr, comment sql.NullString\n\t\tif err := rows.Scan(&c.ID, &c.ConnectionID, &c.DatabaseName, &c.TableName, &c.ColumnName, &c.ColumnType, &c.ColumnPosition, &defaultKind, &defaultExpr, &comment, &c.FirstSeen, &c.LastUpdated, &c.IsDeleted); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan column: %w\", err)\n\t\t}\n\t\tc.DefaultKind = nullStringToPtr(defaultKind)\n\t\tc.DefaultExpression = nullStringToPtr(defaultExpr)\n\t\tc.Comment = nullStringToPtr(comment)\n\t\tresults = append(results, c)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate column rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// UpsertColumn inserts or updates a column record from a GovColumn struct.\nfunc (s *Store) UpsertColumn(c GovColumn) error {\n\tisDeleted := 0\n\tif c.IsDeleted {\n\t\tisDeleted = 1\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_columns (id, connection_id, database_name, table_name, column_name, column_type, column_position, default_kind, default_expression, comment, first_seen, last_updated, is_deleted)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, database_name, table_name, column_name) DO UPDATE SET\n\t\t   column_type = 
excluded.column_type,\n\t\t   column_position = excluded.column_position,\n\t\t   default_kind = excluded.default_kind,\n\t\t   default_expression = excluded.default_expression,\n\t\t   comment = excluded.comment,\n\t\t   last_updated = excluded.last_updated,\n\t\t   is_deleted = excluded.is_deleted`,\n\t\tc.ID, c.ConnectionID, c.DatabaseName, c.TableName, c.ColumnName, c.ColumnType, c.ColumnPosition,\n\t\tptrToNullString(c.DefaultKind), ptrToNullString(c.DefaultExpression), ptrToNullString(c.Comment),\n\t\tc.FirstSeen, c.LastUpdated, isDeleted,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert column: %w\", err)\n\t}\n\treturn nil\n}\n\n// MarkColumnDeleted soft-deletes a column record.\nfunc (s *Store) MarkColumnDeleted(connectionID, dbName, tableName, colName string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := s.conn().Exec(\n\t\t`UPDATE gov_columns SET is_deleted = 1, last_updated = ? WHERE connection_id = ? AND database_name = ? AND table_name = ? AND column_name = ?`,\n\t\tnow, connectionID, dbName, tableName, colName,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"mark column deleted: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── Schema Changes ───────────────────────────────────────────────────────────\n\n// InsertSchemaChange inserts a schema change record from a SchemaChange struct.\nfunc (s *Store) InsertSchemaChange(sc SchemaChange) error {\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_schema_changes (id, connection_id, change_type, database_name, table_name, column_name, old_value, new_value, detected_at, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tsc.ID, sc.ConnectionID, sc.ChangeType, sc.DatabaseName, sc.TableName, sc.ColumnName, sc.OldValue, sc.NewValue, sc.DetectedAt, sc.CreatedAt,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"insert schema change: %w\", err)\n\t}\n\treturn nil\n}\n\n// CreateSchemaChange creates a new schema change record with auto-generated ID and timestamps.\nfunc (s *Store) 
CreateSchemaChange(connectionID string, changeType SchemaChangeType, dbName, tableName, colName, oldVal, newVal string) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_schema_changes (id, connection_id, change_type, database_name, table_name, column_name, old_value, new_value, detected_at, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, connectionID, string(changeType), dbName, tableName, colName, oldVal, newVal, now, now,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create schema change: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetSchemaChanges returns recent schema changes for a connection.\nfunc (s *Store) GetSchemaChanges(connectionID string, limit int) ([]SchemaChange, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, change_type, database_name, table_name, column_name, old_value, new_value, detected_at, created_at\n\t\t FROM gov_schema_changes WHERE connection_id = ? 
ORDER BY detected_at DESC LIMIT ?`,\n\t\tconnectionID, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get schema changes: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []SchemaChange\n\tfor rows.Next() {\n\t\tvar sc SchemaChange\n\t\tif err := rows.Scan(&sc.ID, &sc.ConnectionID, &sc.ChangeType, &sc.DatabaseName, &sc.TableName, &sc.ColumnName, &sc.OldValue, &sc.NewValue, &sc.DetectedAt, &sc.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan schema change: %w\", err)\n\t\t}\n\t\tresults = append(results, sc)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate schema change rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Query Log ────────────────────────────────────────────────────────────────\n\n// BatchInsertQueryLog inserts a batch of query log entries using INSERT OR IGNORE\n// (idempotent by connection_id + query_id).\nfunc (s *Store) BatchInsertQueryLog(connectionID string, entries []QueryLogEntry) error {\n\tif len(entries) == 0 {\n\t\treturn nil\n\t}\n\n\ttx, err := s.conn().Begin()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"begin query log batch: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tstmt, err := tx.Prepare(\n\t\t`INSERT OR IGNORE INTO gov_query_log (id, connection_id, query_id, ch_user, query_text, normalized_hash, query_kind, event_time, duration_ms, read_rows, read_bytes, result_rows, written_rows, written_bytes, memory_usage, tables_used, is_error, error_message, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"prepare query log insert: %w\", err)\n\t}\n\tdefer stmt.Close()\n\n\tfor _, e := range entries {\n\t\tisError := 0\n\t\tif e.IsError {\n\t\t\tisError = 1\n\t\t}\n\t\t_, err := stmt.Exec(\n\t\t\te.ID, e.ConnectionID, e.QueryID, e.User, e.QueryText, e.NormalizedHash, e.QueryKind,\n\t\t\te.EventTime, e.DurationMs, e.ReadRows, e.ReadBytes, e.ResultRows, e.WrittenRows, 
e.WrittenBytes,\n\t\t\te.MemoryUsage, e.TablesUsed, isError, ptrToNullString(e.ErrorMessage), e.CreatedAt,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"insert query log entry: %w\", err)\n\t\t}\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn fmt.Errorf(\"commit query log batch: %w\", err)\n\t}\n\treturn nil\n}\n\n// InsertQueryLogBatch is an alias for BatchInsertQueryLog that also returns inserted count.\nfunc (s *Store) InsertQueryLogBatch(entries []QueryLogEntry) (int, error) {\n\tif len(entries) == 0 {\n\t\treturn 0, nil\n\t}\n\n\ttx, err := s.conn().Begin()\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"begin query log batch: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tstmt, err := tx.Prepare(\n\t\t`INSERT OR IGNORE INTO gov_query_log (id, connection_id, query_id, ch_user, query_text, normalized_hash, query_kind, event_time, duration_ms, read_rows, read_bytes, result_rows, written_rows, written_bytes, memory_usage, tables_used, is_error, error_message, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"prepare query log insert: %w\", err)\n\t}\n\tdefer stmt.Close()\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tinserted := 0\n\n\tfor _, e := range entries {\n\t\tid := e.ID\n\t\tif id == \"\" {\n\t\t\tid = uuid.NewString()\n\t\t}\n\t\tisError := 0\n\t\tif e.IsError {\n\t\t\tisError = 1\n\t\t}\n\t\tcreatedAt := e.CreatedAt\n\t\tif createdAt == \"\" {\n\t\t\tcreatedAt = now\n\t\t}\n\t\tresult, err := stmt.Exec(\n\t\t\tid, e.ConnectionID, e.QueryID, e.User, e.QueryText, e.NormalizedHash, e.QueryKind,\n\t\t\te.EventTime, e.DurationMs, e.ReadRows, e.ReadBytes, e.ResultRows, e.WrittenRows, e.WrittenBytes,\n\t\t\te.MemoryUsage, e.TablesUsed, isError, ptrToNullString(e.ErrorMessage), createdAt,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn 0, fmt.Errorf(\"insert query log entry: %w\", err)\n\t\t}\n\t\taffected, _ := result.RowsAffected()\n\t\tinserted += 
int(affected)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn 0, fmt.Errorf(\"commit query log batch: %w\", err)\n\t}\n\treturn inserted, nil\n}\n\n// GetQueryLog returns paginated query log entries with optional user/table filters.\n// Returns the entries, total count, and any error.\nfunc (s *Store) GetQueryLog(connectionID string, limit, offset int, user, table string) ([]QueryLogEntry, int, error) {\n\twhere := \"connection_id = ?\"\n\targs := []interface{}{connectionID}\n\n\tif user != \"\" {\n\t\twhere += \" AND ch_user = ?\"\n\t\targs = append(args, user)\n\t}\n\tif table != \"\" {\n\t\twhere += \" AND tables_used LIKE ?\"\n\t\targs = append(args, \"%\"+table+\"%\")\n\t}\n\n\t// Get total count\n\tvar total int\n\tcountArgs := make([]interface{}, len(args))\n\tcopy(countArgs, args)\n\terr := s.conn().QueryRow(\"SELECT COUNT(*) FROM gov_query_log WHERE \"+where, countArgs...).Scan(&total)\n\tif err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"count query log: %w\", err)\n\t}\n\n\t// Get page\n\tquery := fmt.Sprintf(\n\t\t`SELECT id, connection_id, query_id, ch_user, query_text, normalized_hash, query_kind, event_time, duration_ms, read_rows, read_bytes, result_rows, written_rows, written_bytes, memory_usage, tables_used, is_error, error_message, created_at\n\t\t FROM gov_query_log WHERE %s ORDER BY event_time DESC LIMIT ? 
OFFSET ?`, where,\n\t)\n\targs = append(args, limit, offset)\n\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"get query log: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []QueryLogEntry\n\tfor rows.Next() {\n\t\tvar e QueryLogEntry\n\t\tvar errorMsg sql.NullString\n\t\tif err := rows.Scan(&e.ID, &e.ConnectionID, &e.QueryID, &e.User, &e.QueryText, &e.NormalizedHash, &e.QueryKind, &e.EventTime, &e.DurationMs, &e.ReadRows, &e.ReadBytes, &e.ResultRows, &e.WrittenRows, &e.WrittenBytes, &e.MemoryUsage, &e.TablesUsed, &e.IsError, &errorMsg, &e.CreatedAt); err != nil {\n\t\t\treturn nil, 0, fmt.Errorf(\"scan query log entry: %w\", err)\n\t\t}\n\t\te.ErrorMessage = nullStringToPtr(errorMsg)\n\t\tresults = append(results, e)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, 0, fmt.Errorf(\"iterate query log rows: %w\", err)\n\t}\n\treturn results, total, nil\n}\n\n// GetTopQueries returns the top queries grouped by normalized_hash.\nfunc (s *Store) GetTopQueries(connectionID string, limit int) ([]map[string]interface{}, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT\n\t\t\tnormalized_hash,\n\t\t\tCOUNT(*) AS cnt,\n\t\t\tROUND(AVG(duration_ms), 2) AS avg_duration_ms,\n\t\t\tCOALESCE(SUM(read_rows), 0) AS total_read_rows,\n\t\t\tMIN(query_text) AS sample_query,\n\t\t\tMAX(event_time) AS last_seen\n\t\t FROM gov_query_log\n\t\t WHERE connection_id = ? 
AND normalized_hash != ''\n\t\t GROUP BY normalized_hash\n\t\t ORDER BY cnt DESC\n\t\t LIMIT ?`,\n\t\tconnectionID, limit,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get top queries: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []map[string]interface{}\n\tfor rows.Next() {\n\t\tvar hash, sampleQuery, lastSeen string\n\t\tvar cnt int\n\t\tvar avgDurationMs float64\n\t\tvar totalReadRows int64\n\t\tif err := rows.Scan(&hash, &cnt, &avgDurationMs, &totalReadRows, &sampleQuery, &lastSeen); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan top query: %w\", err)\n\t\t}\n\t\tresults = append(results, map[string]interface{}{\n\t\t\t\"normalized_hash\": hash,\n\t\t\t\"count\":           cnt,\n\t\t\t\"avg_duration_ms\": avgDurationMs,\n\t\t\t\"total_read_rows\": totalReadRows,\n\t\t\t\"sample_query\":    sampleQuery,\n\t\t\t\"last_seen\":       lastSeen,\n\t\t})\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate top query rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Lineage ──────────────────────────────────────────────────────────────────\n\n// InsertLineageEdge inserts a lineage edge using INSERT OR IGNORE.\nfunc (s *Store) InsertLineageEdge(edge LineageEdge) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT OR IGNORE INTO gov_lineage_edges (id, connection_id, source_database, source_table, target_database, target_table, query_id, ch_user, edge_type, detected_at, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tedge.ID, edge.ConnectionID, edge.SourceDatabase, edge.SourceTable, edge.TargetDatabase, edge.TargetTable,\n\t\tedge.QueryID, edge.User, edge.EdgeType, edge.DetectedAt, now,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"insert lineage edge: %w\", err)\n\t}\n\treturn nil\n}\n\n// UpsertLineageEdge is an alias for InsertLineageEdge (INSERT OR IGNORE is idempotent).\nfunc (s *Store) UpsertLineageEdge(edge LineageEdge) error {\n\treturn 
s.InsertLineageEdge(edge)\n}\n\n// GetLineageForTable returns upstream and downstream edges for a specific table.\nfunc (s *Store) GetLineageForTable(connectionID, dbName, tableName string) ([]LineageEdge, []LineageEdge, error) {\n\tupstreamRows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, source_database, source_table, target_database, target_table, query_id, ch_user, edge_type, detected_at\n\t\t FROM gov_lineage_edges WHERE connection_id = ? AND target_database = ? AND target_table = ?`,\n\t\tconnectionID, dbName, tableName,\n\t)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"get upstream lineage: %w\", err)\n\t}\n\tdefer upstreamRows.Close()\n\n\tupstream, err := scanLineageEdges(upstreamRows)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tdownstreamRows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, source_database, source_table, target_database, target_table, query_id, ch_user, edge_type, detected_at\n\t\t FROM gov_lineage_edges WHERE connection_id = ? AND source_database = ? AND source_table = ?`,\n\t\tconnectionID, dbName, tableName,\n\t)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"get downstream lineage: %w\", err)\n\t}\n\tdefer downstreamRows.Close()\n\n\tdownstream, err := scanLineageEdges(downstreamRows)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn upstream, downstream, nil\n}\n\n// GetFullLineageGraph returns all lineage edges for a connection.\nfunc (s *Store) GetFullLineageGraph(connectionID string) ([]LineageEdge, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, source_database, source_table, target_database, target_table, query_id, ch_user, edge_type, detected_at\n\t\t FROM gov_lineage_edges WHERE connection_id = ? 
ORDER BY detected_at DESC`,\n\t\tconnectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get full lineage graph: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanLineageEdges(rows)\n}\n\nfunc scanLineageEdges(rows *sql.Rows) ([]LineageEdge, error) {\n\tvar results []LineageEdge\n\tfor rows.Next() {\n\t\tvar e LineageEdge\n\t\tif err := rows.Scan(&e.ID, &e.ConnectionID, &e.SourceDatabase, &e.SourceTable, &e.TargetDatabase, &e.TargetTable, &e.QueryID, &e.User, &e.EdgeType, &e.DetectedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan lineage edge: %w\", err)\n\t\t}\n\t\tresults = append(results, e)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate lineage edge rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Column Lineage ─────────────────────────────────────────────────────────\n\n// InsertColumnLineageEdge inserts a column-level lineage edge using INSERT OR IGNORE.\nfunc (s *Store) InsertColumnLineageEdge(edge ColumnLineageEdge) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t_, err := s.conn().Exec(\n\t\t`INSERT OR IGNORE INTO gov_lineage_column_edges (id, lineage_edge_id, connection_id, source_column, target_column, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?)`,\n\t\tedge.ID, edge.LineageEdgeID, edge.ConnectionID, edge.SourceColumn, edge.TargetColumn, now,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"insert column lineage edge: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetColumnEdgesForEdgeIDs batch-loads column lineage edges for a set of lineage edge IDs.\nfunc (s *Store) GetColumnEdgesForEdgeIDs(edgeIDs []string) (map[string][]ColumnLineageEdge, error) {\n\tresult := make(map[string][]ColumnLineageEdge)\n\tif len(edgeIDs) == 0 {\n\t\treturn result, nil\n\t}\n\n\tplaceholders := make([]string, len(edgeIDs))\n\targs := make([]interface{}, len(edgeIDs))\n\tfor i, id := range edgeIDs {\n\t\tplaceholders[i] = \"?\"\n\t\targs[i] = id\n\t}\n\n\tquery := `SELECT id, lineage_edge_id, connection_id, 
source_column, target_column\n\t          FROM gov_lineage_column_edges WHERE lineage_edge_id IN (` + strings.Join(placeholders, \",\") + `)`\n\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get column lineage edges: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tvar e ColumnLineageEdge\n\t\tif err := rows.Scan(&e.ID, &e.LineageEdgeID, &e.ConnectionID, &e.SourceColumn, &e.TargetColumn); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan column lineage edge: %w\", err)\n\t\t}\n\t\tresult[e.LineageEdgeID] = append(result[e.LineageEdgeID], e)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate column lineage edge rows: %w\", err)\n\t}\n\treturn result, nil\n}\n\n// GetQueryByQueryID returns a single query log entry by its ClickHouse query_id.\nfunc (s *Store) GetQueryByQueryID(connectionID, queryID string) (*QueryLogEntry, error) {\n\trow := s.conn().QueryRow(\n\t\t`SELECT id, connection_id, query_id, ch_user, query_text, normalized_hash, query_kind,\n\t\t        event_time, duration_ms, read_rows, read_bytes, result_rows, written_rows,\n\t\t        written_bytes, memory_usage, tables_used, is_error, error_message, created_at\n\t\t FROM gov_query_log WHERE connection_id = ? AND query_id = ? 
LIMIT 1`,\n\t\tconnectionID, queryID,\n\t)\n\n\tvar e QueryLogEntry\n\tif err := row.Scan(&e.ID, &e.ConnectionID, &e.QueryID, &e.User, &e.QueryText,\n\t\t&e.NormalizedHash, &e.QueryKind, &e.EventTime, &e.DurationMs,\n\t\t&e.ReadRows, &e.ReadBytes, &e.ResultRows, &e.WrittenRows,\n\t\t&e.WrittenBytes, &e.MemoryUsage, &e.TablesUsed, &e.IsError,\n\t\t&e.ErrorMessage, &e.CreatedAt); err != nil {\n\t\treturn nil, fmt.Errorf(\"get query by query_id: %w\", err)\n\t}\n\treturn &e, nil\n}\n\n// ── Tags ─────────────────────────────────────────────────────────────────────\n\n// GetTags returns all tags for a connection.\nfunc (s *Store) GetTags(connectionID string) ([]TagEntry, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, object_type, database_name, table_name, column_name, tag, tagged_by, created_at\n\t\t FROM gov_tags WHERE connection_id = ? ORDER BY created_at DESC`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get tags: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanTags(rows)\n}\n\n// GetTagsForTable returns all tags for a specific table.\nfunc (s *Store) GetTagsForTable(connectionID, dbName, tableName string) ([]TagEntry, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, object_type, database_name, table_name, column_name, tag, tagged_by, created_at\n\t\t FROM gov_tags WHERE connection_id = ? AND database_name = ? AND table_name = ? ORDER BY created_at DESC`,\n\t\tconnectionID, dbName, tableName,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get tags for table: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanTags(rows)\n}\n\n// GetTagsForColumn returns all tags for a specific column.\nfunc (s *Store) GetTagsForColumn(connectionID, dbName, tableName, colName string) ([]TagEntry, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, object_type, database_name, table_name, column_name, tag, tagged_by, created_at\n\t\t FROM gov_tags WHERE connection_id = ? 
AND database_name = ? AND table_name = ? AND column_name = ? ORDER BY created_at DESC`,\n\t\tconnectionID, dbName, tableName, colName,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get tags for column: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanTags(rows)\n}\n\n// CreateTag creates a new tag entry and returns its ID.\nfunc (s *Store) CreateTag(connectionID, objectType, dbName, tableName, colName string, tag SensitivityTag, taggedBy string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_tags (id, connection_id, object_type, database_name, table_name, column_name, tag, tagged_by, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, connectionID, objectType, dbName, tableName, colName, string(tag), taggedBy, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create tag: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// DeleteTag deletes a tag by ID.\nfunc (s *Store) DeleteTag(id string) error {\n\t_, err := s.conn().Exec(\"DELETE FROM gov_tags WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete tag: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetTaggedTableCount returns the count of distinct tables that have at least one tag.\nfunc (s *Store) GetTaggedTableCount(connectionID string) (int, error) {\n\tvar count int\n\terr := s.conn().QueryRow(\n\t\t`SELECT COUNT(DISTINCT database_name || '.' 
|| table_name) FROM gov_tags WHERE connection_id = ?`,\n\t\tconnectionID,\n\t).Scan(&count)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"get tagged table count: %w\", err)\n\t}\n\treturn count, nil\n}\n\nfunc scanTags(rows *sql.Rows) ([]TagEntry, error) {\n\tvar results []TagEntry\n\tfor rows.Next() {\n\t\tvar t TagEntry\n\t\tif err := rows.Scan(&t.ID, &t.ConnectionID, &t.ObjectType, &t.DatabaseName, &t.TableName, &t.ColumnName, &t.Tag, &t.TaggedBy, &t.CreatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan tag: %w\", err)\n\t\t}\n\t\tresults = append(results, t)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate tag rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// DeleteChUsersForConnection removes all cached ClickHouse users for a connection.\nfunc (s *Store) DeleteChUsersForConnection(connectionID string) error {\n\tif _, err := s.conn().Exec(`DELETE FROM gov_ch_users WHERE connection_id = ?`, connectionID); err != nil {\n\t\treturn fmt.Errorf(\"delete ch users for connection: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteChRolesForConnection removes all cached ClickHouse roles for a connection.\nfunc (s *Store) DeleteChRolesForConnection(connectionID string) error {\n\tif _, err := s.conn().Exec(`DELETE FROM gov_ch_roles WHERE connection_id = ?`, connectionID); err != nil {\n\t\treturn fmt.Errorf(\"delete ch roles for connection: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteRoleGrantsForConnection removes all cached role grants for a connection.\nfunc (s *Store) DeleteRoleGrantsForConnection(connectionID string) error {\n\tif _, err := s.conn().Exec(`DELETE FROM gov_role_grants WHERE connection_id = ?`, connectionID); err != nil {\n\t\treturn fmt.Errorf(\"delete role grants for connection: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeleteGrantsForConnection removes all cached grants for a connection.\nfunc (s *Store) DeleteGrantsForConnection(connectionID string) error {\n\tif _, err := s.conn().Exec(`DELETE FROM gov_grants 
WHERE connection_id = ?`, connectionID); err != nil {\n\t\treturn fmt.Errorf(\"delete grants for connection: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── CH Users ─────────────────────────────────────────────────────────────────\n\n// UpsertChUser inserts or replaces a ClickHouse user record from a ChUser struct.\nfunc (s *Store) UpsertChUser(u ChUser) error {\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_ch_users (id, connection_id, name, auth_type, host_ip, default_roles, first_seen, last_updated)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, name) DO UPDATE SET\n\t\t   auth_type = excluded.auth_type,\n\t\t   host_ip = excluded.host_ip,\n\t\t   default_roles = excluded.default_roles,\n\t\t   last_updated = excluded.last_updated`,\n\t\tu.ID, u.ConnectionID, u.Name, ptrToNullString(u.AuthType), ptrToNullString(u.HostIP), ptrToNullString(u.DefaultRoles), u.FirstSeen, u.LastUpdated,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert ch user: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetChUsers returns all ClickHouse users for a connection.\nfunc (s *Store) GetChUsers(connectionID string) ([]ChUser, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, name, auth_type, host_ip, default_roles, first_seen, last_updated\n\t\t FROM gov_ch_users WHERE connection_id = ? 
ORDER BY name`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get ch users: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []ChUser\n\tfor rows.Next() {\n\t\tvar u ChUser\n\t\tvar authType, hostIP, defaultRoles sql.NullString\n\t\tif err := rows.Scan(&u.ID, &u.ConnectionID, &u.Name, &authType, &hostIP, &defaultRoles, &u.FirstSeen, &u.LastUpdated); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan ch user: %w\", err)\n\t\t}\n\t\tu.AuthType = nullStringToPtr(authType)\n\t\tu.HostIP = nullStringToPtr(hostIP)\n\t\tu.DefaultRoles = nullStringToPtr(defaultRoles)\n\t\tresults = append(results, u)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate ch user rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── CH Roles ─────────────────────────────────────────────────────────────────\n\n// UpsertChRole inserts or replaces a ClickHouse role record from a ChRole struct.\nfunc (s *Store) UpsertChRole(r ChRole) error {\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_ch_roles (id, connection_id, name, first_seen, last_updated)\n\t\t VALUES (?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, name) DO UPDATE SET\n\t\t   last_updated = excluded.last_updated`,\n\t\tr.ID, r.ConnectionID, r.Name, r.FirstSeen, r.LastUpdated,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert ch role: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetChRoles returns all ClickHouse roles for a connection.\nfunc (s *Store) GetChRoles(connectionID string) ([]ChRole, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, name, first_seen, last_updated\n\t\t FROM gov_ch_roles WHERE connection_id = ? 
ORDER BY name`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get ch roles: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []ChRole\n\tfor rows.Next() {\n\t\tvar r ChRole\n\t\tif err := rows.Scan(&r.ID, &r.ConnectionID, &r.Name, &r.FirstSeen, &r.LastUpdated); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan ch role: %w\", err)\n\t\t}\n\t\tresults = append(results, r)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate ch role rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Role Grants ──────────────────────────────────────────────────────────────\n\n// UpsertRoleGrant inserts or replaces a role grant record from a RoleGrant struct.\nfunc (s *Store) UpsertRoleGrant(rg RoleGrant) error {\n\tisDefaultInt := 0\n\tif rg.IsDefault {\n\t\tisDefaultInt = 1\n\t}\n\twithAdminInt := 0\n\tif rg.WithAdminOption {\n\t\twithAdminInt = 1\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_role_grants (id, connection_id, user_name, granted_role_name, is_default, with_admin_option, first_seen, last_updated)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n\t\t ON CONFLICT(connection_id, user_name, granted_role_name) DO UPDATE SET\n\t\t   is_default = excluded.is_default,\n\t\t   with_admin_option = excluded.with_admin_option,\n\t\t   last_updated = excluded.last_updated`,\n\t\trg.ID, rg.ConnectionID, rg.UserName, rg.GrantedRoleName, isDefaultInt, withAdminInt, rg.FirstSeen, rg.LastUpdated,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert role grant: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetRoleGrants returns all role grants for a connection.\nfunc (s *Store) GetRoleGrants(connectionID string) ([]RoleGrant, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, user_name, granted_role_name, is_default, with_admin_option, first_seen, last_updated\n\t\t FROM gov_role_grants WHERE connection_id = ? 
ORDER BY user_name, granted_role_name`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get role grants: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []RoleGrant\n\tfor rows.Next() {\n\t\tvar rg RoleGrant\n\t\tif err := rows.Scan(&rg.ID, &rg.ConnectionID, &rg.UserName, &rg.GrantedRoleName, &rg.IsDefault, &rg.WithAdminOption, &rg.FirstSeen, &rg.LastUpdated); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan role grant: %w\", err)\n\t\t}\n\t\tresults = append(results, rg)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate role grant rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Grants ───────────────────────────────────────────────────────────────────\n\n// UpsertGrant inserts or replaces a grant record from a Grant struct.\nfunc (s *Store) UpsertGrant(g Grant) error {\n\tisPartialRevoke := 0\n\tif g.IsPartialRevoke {\n\t\tisPartialRevoke = 1\n\t}\n\tgrantOption := 0\n\tif g.GrantOption {\n\t\tgrantOption = 1\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT OR REPLACE INTO gov_grants (id, connection_id, user_name, role_name, access_type, grant_database, grant_table, grant_column, is_partial_revoke, grant_option, first_seen, last_updated)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tg.ID, g.ConnectionID, ptrToNullString(g.UserName), ptrToNullString(g.RoleName),\n\t\tg.AccessType, ptrToNullString(g.GrantDatabase), ptrToNullString(g.GrantTable), ptrToNullString(g.GrantColumn),\n\t\tisPartialRevoke, grantOption, g.FirstSeen, g.LastUpdated,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"upsert grant: %w\", err)\n\t}\n\treturn nil\n}\n\n// GetGrants returns all grants for a connection.\nfunc (s *Store) GetGrants(connectionID string) ([]Grant, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, user_name, role_name, access_type, grant_database, grant_table, grant_column, is_partial_revoke, grant_option, first_seen, last_updated\n\t\t FROM gov_grants WHERE connection_id = 
? ORDER BY access_type`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get grants: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []Grant\n\tfor rows.Next() {\n\t\tvar g Grant\n\t\tvar userName, roleName, grantDB, grantTable, grantCol sql.NullString\n\t\tif err := rows.Scan(&g.ID, &g.ConnectionID, &userName, &roleName, &g.AccessType, &grantDB, &grantTable, &grantCol, &g.IsPartialRevoke, &g.GrantOption, &g.FirstSeen, &g.LastUpdated); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan grant: %w\", err)\n\t\t}\n\t\tg.UserName = nullStringToPtr(userName)\n\t\tg.RoleName = nullStringToPtr(roleName)\n\t\tg.GrantDatabase = nullStringToPtr(grantDB)\n\t\tg.GrantTable = nullStringToPtr(grantTable)\n\t\tg.GrantColumn = nullStringToPtr(grantCol)\n\t\tresults = append(results, g)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate grant rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Access Matrix ────────────────────────────────────────────────────────────\n\n// RebuildAccessMatrix deletes all access matrix entries for a connection and rebuilds\n// from grants and role_grants joined data. 
Returns the number of entries created.\nfunc (s *Store) RebuildAccessMatrix(connectionID string) (int, error) {\n\ttx, err := s.conn().Begin()\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"begin access matrix rebuild: %w\", err)\n\t}\n\tdefer tx.Rollback()\n\n\tif _, err := tx.Exec(\"DELETE FROM gov_access_matrix WHERE connection_id = ?\", connectionID); err != nil {\n\t\treturn 0, fmt.Errorf(\"delete access matrix: %w\", err)\n\t}\n\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\t// Preload last query timestamps per user once to avoid N extra lookups while inserting.\n\tlastQueryByUser := make(map[string]sql.NullString)\n\tlastQueryRows, err := tx.Query(\n\t\t`SELECT ch_user, MAX(event_time)\n\t\t FROM gov_query_log\n\t\t WHERE connection_id = ?\n\t\t GROUP BY ch_user`,\n\t\tconnectionID,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"query last query times: %w\", err)\n\t}\n\tfor lastQueryRows.Next() {\n\t\tvar userName string\n\t\tvar lastQueryTime sql.NullString\n\t\tif err := lastQueryRows.Scan(&userName, &lastQueryTime); err != nil {\n\t\t\tlastQueryRows.Close()\n\t\t\treturn 0, fmt.Errorf(\"scan last query time: %w\", err)\n\t\t}\n\t\tlastQueryByUser[userName] = lastQueryTime\n\t}\n\tif err := lastQueryRows.Err(); err != nil {\n\t\tlastQueryRows.Close()\n\t\treturn 0, fmt.Errorf(\"iterate last query rows: %w\", err)\n\t}\n\tif err := lastQueryRows.Close(); err != nil {\n\t\treturn 0, fmt.Errorf(\"close last query rows: %w\", err)\n\t}\n\n\tinsertStmt, err := tx.Prepare(\n\t\t`INSERT INTO gov_access_matrix (id, connection_id, user_name, role_name, database_name, table_name, privilege, is_direct_grant, last_query_time, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"prepare access matrix insert: %w\", err)\n\t}\n\tdefer insertStmt.Close()\n\n\tcount := 0\n\n\t// Insert direct user grants\n\tdirectRows, err := tx.Query(\n\t\t`SELECT g.user_name, g.grant_database, g.grant_table, 
g.access_type\n\t\t FROM gov_grants g\n\t\t WHERE g.connection_id = ? AND g.user_name IS NOT NULL AND g.is_partial_revoke = 0`,\n\t\tconnectionID,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"query direct grants: %w\", err)\n\t}\n\n\tfor directRows.Next() {\n\t\tvar userName string\n\t\tvar grantDB, grantTable sql.NullString\n\t\tvar accessType string\n\t\tif err := directRows.Scan(&userName, &grantDB, &grantTable, &accessType); err != nil {\n\t\t\tdirectRows.Close()\n\t\t\treturn 0, fmt.Errorf(\"scan direct grant: %w\", err)\n\t\t}\n\n\t\tlastQueryTime := lastQueryByUser[userName]\n\t\tif _, err = insertStmt.Exec(\n\t\t\tuuid.NewString(), connectionID, userName, nil, grantDB, grantTable, accessType, 1, lastQueryTime, now,\n\t\t); err != nil {\n\t\t\tdirectRows.Close()\n\t\t\treturn 0, fmt.Errorf(\"insert direct grant matrix: %w\", err)\n\t\t}\n\t\tcount++\n\t}\n\tif err := directRows.Err(); err != nil {\n\t\tdirectRows.Close()\n\t\treturn 0, fmt.Errorf(\"iterate direct grant rows: %w\", err)\n\t}\n\tif err := directRows.Close(); err != nil {\n\t\treturn 0, fmt.Errorf(\"close direct grant rows: %w\", err)\n\t}\n\n\t// Insert role-based grants\n\troleRows, err := tx.Query(\n\t\t`SELECT rg.user_name, rg.granted_role_name, g.grant_database, g.grant_table, g.access_type\n\t\t FROM gov_role_grants rg\n\t\t JOIN gov_grants g ON g.connection_id = rg.connection_id AND g.role_name = rg.granted_role_name\n\t\t WHERE rg.connection_id = ? 
AND g.is_partial_revoke = 0`,\n\t\tconnectionID,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"query role-based grants: %w\", err)\n\t}\n\n\tfor roleRows.Next() {\n\t\tvar userName, roleName, accessType string\n\t\tvar grantDB, grantTable sql.NullString\n\t\tif err := roleRows.Scan(&userName, &roleName, &grantDB, &grantTable, &accessType); err != nil {\n\t\t\troleRows.Close()\n\t\t\treturn 0, fmt.Errorf(\"scan role grant: %w\", err)\n\t\t}\n\n\t\tlastQueryTime := lastQueryByUser[userName]\n\t\tif _, err = insertStmt.Exec(\n\t\t\tuuid.NewString(), connectionID, userName, roleName, grantDB, grantTable, accessType, 0, lastQueryTime, now,\n\t\t); err != nil {\n\t\t\troleRows.Close()\n\t\t\treturn 0, fmt.Errorf(\"insert role grant matrix: %w\", err)\n\t\t}\n\t\tcount++\n\t}\n\tif err := roleRows.Err(); err != nil {\n\t\troleRows.Close()\n\t\treturn 0, fmt.Errorf(\"iterate role grant rows: %w\", err)\n\t}\n\tif err := roleRows.Close(); err != nil {\n\t\treturn 0, fmt.Errorf(\"close role grant rows: %w\", err)\n\t}\n\n\tif err := tx.Commit(); err != nil {\n\t\treturn 0, fmt.Errorf(\"commit access matrix rebuild: %w\", err)\n\t}\n\treturn count, nil\n}\n\n// GetAccessMatrix returns all access matrix entries for a connection.\nfunc (s *Store) GetAccessMatrix(connectionID string) ([]AccessMatrixEntry, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, user_name, role_name, database_name, table_name, privilege, is_direct_grant, last_query_time\n\t\t FROM gov_access_matrix WHERE connection_id = ? 
ORDER BY user_name, privilege`, connectionID,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get access matrix: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanAccessMatrix(rows)\n}\n\n// GetAccessMatrixForUser returns access matrix entries for a specific user.\nfunc (s *Store) GetAccessMatrixForUser(connectionID, userName string) ([]AccessMatrixEntry, error) {\n\trows, err := s.conn().Query(\n\t\t`SELECT id, connection_id, user_name, role_name, database_name, table_name, privilege, is_direct_grant, last_query_time\n\t\t FROM gov_access_matrix WHERE connection_id = ? AND user_name = ? ORDER BY privilege`,\n\t\tconnectionID, userName,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get access matrix for user: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\treturn scanAccessMatrix(rows)\n}\n\n// UserHasRole checks whether a user has been granted a specific role.\nfunc (s *Store) UserHasRole(connectionID, userName, roleName string) (bool, error) {\n\tvar count int\n\terr := s.conn().QueryRow(\n\t\t`SELECT COUNT(*) FROM gov_role_grants WHERE connection_id = ? AND user_name = ? 
AND granted_role_name = ?`,\n\t\tconnectionID, userName, roleName,\n\t).Scan(&count)\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"check user role: %w\", err)\n\t}\n\treturn count > 0, nil\n}\n\n// GetOverPermissions finds access matrix entries where last_query_time is null\n// or older than a default inactivity threshold (30 days).\nfunc (s *Store) GetOverPermissions(connectionID string) ([]OverPermission, error) {\n\treturn s.GetOverPermissionsWithDays(connectionID, 30)\n}\n\n// GetOverPermissionsWithDays finds access matrix entries where last_query_time is null\n// or older than inactiveDays.\nfunc (s *Store) GetOverPermissionsWithDays(connectionID string, inactiveDays int) ([]OverPermission, error) {\n\tcutoff := time.Now().UTC().AddDate(0, 0, -inactiveDays).Format(time.RFC3339)\n\n\trows, err := s.conn().Query(\n\t\t`SELECT user_name, role_name, database_name, table_name, privilege, last_query_time\n\t\t FROM gov_access_matrix\n\t\t WHERE connection_id = ? AND (last_query_time IS NULL OR last_query_time < ?)\n\t\t ORDER BY user_name, privilege`,\n\t\tconnectionID, cutoff,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get over permissions: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []OverPermission\n\tfor rows.Next() {\n\t\tvar op OverPermission\n\t\tvar roleName, dbName, tableName, lastQueryTime sql.NullString\n\t\tif err := rows.Scan(&op.UserName, &roleName, &dbName, &tableName, &op.Privilege, &lastQueryTime); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan over permission: %w\", err)\n\t\t}\n\t\top.RoleName = nullStringToPtr(roleName)\n\t\top.DatabaseName = nullStringToPtr(dbName)\n\t\top.TableName = nullStringToPtr(tableName)\n\t\top.LastQueryTime = nullStringToPtr(lastQueryTime)\n\n\t\tif lastQueryTime.Valid {\n\t\t\tt, parseErr := time.Parse(time.RFC3339, lastQueryTime.String)\n\t\t\tif parseErr == nil {\n\t\t\t\tdays := int(time.Since(t).Hours() / 24)\n\t\t\t\top.DaysSinceQuery = &days\n\t\t\t}\n\t\t\top.Reason = 
fmt.Sprintf(\"no queries in %d+ days\", inactiveDays)\n\t\t} else {\n\t\t\top.Reason = \"never queried\"\n\t\t}\n\n\t\tresults = append(results, op)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate over permission rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\nfunc scanAccessMatrix(rows *sql.Rows) ([]AccessMatrixEntry, error) {\n\tvar results []AccessMatrixEntry\n\tfor rows.Next() {\n\t\tvar am AccessMatrixEntry\n\t\tvar roleName, dbName, tableName, lastQueryTime sql.NullString\n\t\tif err := rows.Scan(&am.ID, &am.ConnectionID, &am.UserName, &roleName, &dbName, &tableName, &am.Privilege, &am.IsDirectGrant, &lastQueryTime); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan access matrix entry: %w\", err)\n\t\t}\n\t\tam.RoleName = nullStringToPtr(roleName)\n\t\tam.DatabaseName = nullStringToPtr(dbName)\n\t\tam.TableName = nullStringToPtr(tableName)\n\t\tam.LastQueryTime = nullStringToPtr(lastQueryTime)\n\t\tresults = append(results, am)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate access matrix rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// ── Policies ─────────────────────────────────────────────────────────────────\n\n// GetPolicies returns all policies for a connection.\nfunc (s *Store) GetPolicies(connectionID string) ([]Policy, error) {\n\treturn s.scanPolicies(\n\t\t`SELECT id, connection_id, name, description, object_type, object_database, object_table, object_column, required_role, severity, enforcement_mode, enabled, created_by, created_at, updated_at\n\t\t FROM gov_policies WHERE connection_id = ? 
ORDER BY name`, connectionID,\n\t)\n}\n\n// GetEnabledPolicies returns all enabled policies for a connection.\nfunc (s *Store) GetEnabledPolicies(connectionID string) ([]Policy, error) {\n\treturn s.scanPolicies(\n\t\t`SELECT id, connection_id, name, description, object_type, object_database, object_table, object_column, required_role, severity, enforcement_mode, enabled, created_by, created_at, updated_at\n\t\t FROM gov_policies WHERE connection_id = ? AND enabled = 1 ORDER BY name`, connectionID,\n\t)\n}\n\nfunc (s *Store) scanPolicies(query string, args ...interface{}) ([]Policy, error) {\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get policies: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []Policy\n\tfor rows.Next() {\n\t\tvar p Policy\n\t\tvar desc, objDB, objTable, objCol, createdBy, enforcementMode sql.NullString\n\t\tif err := rows.Scan(&p.ID, &p.ConnectionID, &p.Name, &desc, &p.ObjectType, &objDB, &objTable, &objCol, &p.RequiredRole, &p.Severity, &enforcementMode, &p.Enabled, &createdBy, &p.CreatedAt, &p.UpdatedAt); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan policy: %w\", err)\n\t\t}\n\t\tp.Description = nullStringToPtr(desc)\n\t\tp.ObjectDatabase = nullStringToPtr(objDB)\n\t\tp.ObjectTable = nullStringToPtr(objTable)\n\t\tp.ObjectColumn = nullStringToPtr(objCol)\n\t\tp.EnforcementMode = normalizePolicyEnforcementMode(enforcementMode.String)\n\t\tp.CreatedBy = nullStringToPtr(createdBy)\n\t\tresults = append(results, p)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate policy rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\n// GetPolicyByID returns a single policy by ID.\nfunc (s *Store) GetPolicyByID(id string) (*Policy, error) {\n\trow := s.conn().QueryRow(\n\t\t`SELECT id, connection_id, name, description, object_type, object_database, object_table, object_column, required_role, severity, enforcement_mode, enabled, created_by, created_at, 
updated_at\n\t\t FROM gov_policies WHERE id = ?`, id,\n\t)\n\n\tvar p Policy\n\tvar desc, objDB, objTable, objCol, createdBy, enforcementMode sql.NullString\n\terr := row.Scan(&p.ID, &p.ConnectionID, &p.Name, &desc, &p.ObjectType, &objDB, &objTable, &objCol, &p.RequiredRole, &p.Severity, &enforcementMode, &p.Enabled, &createdBy, &p.CreatedAt, &p.UpdatedAt)\n\tif err == sql.ErrNoRows {\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get policy by id: %w\", err)\n\t}\n\tp.Description = nullStringToPtr(desc)\n\tp.ObjectDatabase = nullStringToPtr(objDB)\n\tp.ObjectTable = nullStringToPtr(objTable)\n\tp.ObjectColumn = nullStringToPtr(objCol)\n\tp.EnforcementMode = normalizePolicyEnforcementMode(enforcementMode.String)\n\tp.CreatedBy = nullStringToPtr(createdBy)\n\treturn &p, nil\n}\n\n// CreatePolicy creates a new policy and returns its ID.\nfunc (s *Store) CreatePolicy(connectionID, name, description, objectType, objectDB, objectTable, objectCol, requiredRole, severity, enforcementMode, createdBy string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\n\tvar desc, oDB, oTable, oCol, cBy interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\tif objectDB != \"\" {\n\t\toDB = objectDB\n\t}\n\tif objectTable != \"\" {\n\t\toTable = objectTable\n\t}\n\tif objectCol != \"\" {\n\t\toCol = objectCol\n\t}\n\tif createdBy != \"\" {\n\t\tcBy = createdBy\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_policies (id, connection_id, name, description, object_type, object_database, object_table, object_column, required_role, severity, enforcement_mode, enabled, created_by, created_at, updated_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1, ?, ?, ?)`,\n\t\tid, connectionID, name, desc, objectType, oDB, oTable, oCol, requiredRole, severity, normalizePolicyEnforcementMode(enforcementMode), cBy, now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create policy: %w\", 
err)\n\t}\n\treturn id, nil\n}\n\n// UpdatePolicy updates an existing policy.\nfunc (s *Store) UpdatePolicy(id, name, description, requiredRole, severity, enforcementMode string, enabled bool) error {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\n\tvar desc interface{}\n\tif description != \"\" {\n\t\tdesc = description\n\t}\n\n\tenabledInt := 0\n\tif enabled {\n\t\tenabledInt = 1\n\t}\n\n\t_, err := s.conn().Exec(\n\t\t`UPDATE gov_policies SET name = ?, description = ?, required_role = ?, severity = ?, enforcement_mode = ?, enabled = ?, updated_at = ? WHERE id = ?`,\n\t\tname, desc, requiredRole, severity, normalizePolicyEnforcementMode(enforcementMode), enabledInt, now, id,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"update policy: %w\", err)\n\t}\n\treturn nil\n}\n\n// DeletePolicy deletes a policy by ID (cascades to violations).\nfunc (s *Store) DeletePolicy(id string) error {\n\t_, err := s.conn().Exec(\"DELETE FROM gov_policies WHERE id = ?\", id)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"delete policy: %w\", err)\n\t}\n\treturn nil\n}\n\n// ── Violations ───────────────────────────────────────────────────────────────\n\n// InsertPolicyViolation inserts a policy violation from a PolicyViolation struct.\nfunc (s *Store) InsertPolicyViolation(v PolicyViolation) error {\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_policy_violations (id, connection_id, policy_id, query_log_id, ch_user, violation_detail, severity, detection_phase, request_endpoint, detected_at, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tv.ID, v.ConnectionID, v.PolicyID, nullableValue(v.QueryLogID), v.User, v.ViolationDetail, v.Severity, normalizeDetectionPhase(v.DetectionPhase), nullableValue(deref(v.RequestEndpoint)), v.DetectedAt, v.CreatedAt,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"insert policy violation: %w\", err)\n\t}\n\treturn nil\n}\n\n// CreateViolation creates a new policy violation and returns its ID.\nfunc (s *Store) 
CreateViolation(connectionID, policyID, queryLogID, user, detail, severity, detectionPhase, requestEndpoint string) (string, error) {\n\tnow := time.Now().UTC().Format(time.RFC3339)\n\tid := uuid.NewString()\n\n\t_, err := s.conn().Exec(\n\t\t`INSERT INTO gov_policy_violations (id, connection_id, policy_id, query_log_id, ch_user, violation_detail, severity, detection_phase, request_endpoint, detected_at, created_at)\n\t\t VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n\t\tid, connectionID, policyID, nullableValue(queryLogID), user, detail, severity, normalizeDetectionPhase(detectionPhase), nullableValue(requestEndpoint), now, now,\n\t)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create violation: %w\", err)\n\t}\n\treturn id, nil\n}\n\n// GetViolations returns violations for a connection with optional policyID filter,\n// joined with the policy name.\nfunc (s *Store) GetViolations(connectionID string, limit int, policyID string) ([]PolicyViolation, error) {\n\twhere := \"v.connection_id = ?\"\n\targs := []interface{}{connectionID}\n\n\tif policyID != \"\" {\n\t\twhere += \" AND v.policy_id = ?\"\n\t\targs = append(args, policyID)\n\t}\n\n\targs = append(args, limit)\n\n\tquery := fmt.Sprintf(\n\t\t`SELECT v.id, v.connection_id, v.policy_id, v.query_log_id, v.ch_user, v.violation_detail, v.severity, v.detection_phase, v.request_endpoint, v.detected_at, v.created_at, COALESCE(p.name, '')\n\t\t FROM gov_policy_violations v\n\t\t LEFT JOIN gov_policies p ON p.id = v.policy_id\n\t\t WHERE %s\n\t\t ORDER BY v.detected_at DESC\n\t\t LIMIT ?`, where,\n\t)\n\n\trows, err := s.conn().Query(query, args...)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"get violations: %w\", err)\n\t}\n\tdefer rows.Close()\n\n\tvar results []PolicyViolation\n\tfor rows.Next() {\n\t\tvar v PolicyViolation\n\t\tvar queryLogID, requestEndpoint sql.NullString\n\t\tif err := rows.Scan(&v.ID, &v.ConnectionID, &v.PolicyID, &queryLogID, &v.User, &v.ViolationDetail, &v.Severity, &v.DetectionPhase, 
&requestEndpoint, &v.DetectedAt, &v.CreatedAt, &v.PolicyName); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"scan violation: %w\", err)\n\t\t}\n\t\tv.QueryLogID = queryLogID.String\n\t\tv.RequestEndpoint = nullStringToPtr(requestEndpoint)\n\t\tv.DetectionPhase = normalizeDetectionPhase(v.DetectionPhase)\n\t\tresults = append(results, v)\n\t}\n\tif err := rows.Err(); err != nil {\n\t\treturn nil, fmt.Errorf(\"iterate violation rows: %w\", err)\n\t}\n\treturn results, nil\n}\n\nfunc normalizePolicyEnforcementMode(v string) string {\n\tmode := strings.ToLower(strings.TrimSpace(v))\n\tswitch mode {\n\tcase \"block\":\n\t\treturn \"block\"\n\tdefault:\n\t\treturn \"warn\"\n\t}\n}\n\nfunc normalizeDetectionPhase(v string) string {\n\tphase := strings.ToLower(strings.TrimSpace(v))\n\tswitch phase {\n\tcase \"pre_exec_block\":\n\t\treturn \"pre_exec_block\"\n\tdefault:\n\t\treturn \"post_exec\"\n\t}\n}\n\n// ── Overview ─────────────────────────────────────────────────────────────────\n\n// GetOverview returns aggregate counts from all governance tables for a connection.\nfunc (s *Store) GetOverview(connectionID string) (*GovernanceOverview, error) {\n\to := &GovernanceOverview{}\n\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_databases WHERE connection_id = ? AND is_deleted = 0\", connectionID).Scan(&o.DatabaseCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_tables WHERE connection_id = ? AND is_deleted = 0\", connectionID).Scan(&o.TableCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_columns WHERE connection_id = ? 
AND is_deleted = 0\", connectionID).Scan(&o.ColumnCount)\n\n\ttagCount, err := s.GetTaggedTableCount(connectionID)\n\tif err == nil {\n\t\to.TaggedTableCount = tagCount\n\t}\n\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_ch_users WHERE connection_id = ?\", connectionID).Scan(&o.UserCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_ch_roles WHERE connection_id = ?\", connectionID).Scan(&o.RoleCount)\n\n\tcutoff24h := time.Now().UTC().Add(-24 * time.Hour).Format(time.RFC3339)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_query_log WHERE connection_id = ? AND event_time > ?\", connectionID, cutoff24h).Scan(&o.QueryCount24h)\n\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_lineage_edges WHERE connection_id = ?\", connectionID).Scan(&o.LineageEdgeCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_policies WHERE connection_id = ?\", connectionID).Scan(&o.PolicyCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_policy_violations WHERE connection_id = ?\", connectionID).Scan(&o.ViolationCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_incidents WHERE connection_id = ? 
AND status IN ('open', 'triaged', 'in_progress')\", connectionID).Scan(&o.IncidentCount)\n\ts.conn().QueryRow(\"SELECT COUNT(*) FROM gov_schema_changes WHERE connection_id = ?\", connectionID).Scan(&o.SchemaChangeCount)\n\n\tsyncStates, err := s.GetSyncStates(connectionID)\n\tif err == nil {\n\t\to.SyncStates = syncStates\n\t}\n\n\trecentChanges, err := s.GetSchemaChanges(connectionID, 10)\n\tif err == nil {\n\t\to.RecentChanges = recentChanges\n\t}\n\n\trecentViolations, err := s.GetViolations(connectionID, 10, \"\")\n\tif err == nil {\n\t\to.RecentViolations = recentViolations\n\t}\n\n\treturn o, nil\n}\n\n// ── Cleanup ──────────────────────────────────────────────────────────────────\n\n// CleanupOldQueryLogs deletes query logs older than the given timestamp.\nfunc (s *Store) CleanupOldQueryLogs(connectionID string, before string) (int64, error) {\n\tresult, err := s.conn().Exec(\n\t\t\"DELETE FROM gov_query_log WHERE connection_id = ? AND event_time < ?\",\n\t\tconnectionID, before,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"cleanup old query logs: %w\", err)\n\t}\n\treturn result.RowsAffected()\n}\n\n// CleanupOldViolations deletes violations older than the given timestamp.\nfunc (s *Store) CleanupOldViolations(connectionID string, before string) (int64, error) {\n\tresult, err := s.conn().Exec(\n\t\t\"DELETE FROM gov_policy_violations WHERE connection_id = ? AND detected_at < ?\",\n\t\tconnectionID, before,\n\t)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"cleanup old violations: %w\", err)\n\t}\n\treturn result.RowsAffected()\n}\n"
  },
  {
    "path": "internal/governance/syncer.go",
    "content": "package governance\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\nconst (\n\tsyncTickInterval = 5 * time.Minute\n\tqueryTimeout     = 60 * time.Second\n\tstaleDuration    = 10 * time.Minute\n\tretentionDays    = 30\n)\n\n// Syncer orchestrates ClickHouse → SQLite governance synchronisation.\n// It runs periodic background syncs and supports on-demand sync for\n// individual connections.\ntype Syncer struct {\n\tstore          *Store\n\tdb             *database.DB\n\tgateway        *tunnel.Gateway\n\tsecret         string\n\tactiveSyncs    sync.Map     // connectionID → bool (prevents concurrent syncs per connection)\n\tlastBorrowLog  sync.Map     // connectionID → time.Time (rate-limits credential borrow audit rows)\n\tmu             sync.Mutex\n\trunning        bool\n\tstopCh         chan struct{}\n}\n\n// NewSyncer creates a new governance Syncer.\nfunc NewSyncer(store *Store, db *database.DB, gw *tunnel.Gateway, secret string) *Syncer {\n\treturn &Syncer{\n\t\tstore:   store,\n\t\tdb:      db,\n\t\tgateway: gw,\n\t\tsecret:  secret,\n\t}\n}\n\n// GetStore returns the underlying governance store.\nfunc (s *Syncer) GetStore() *Store {\n\treturn s.store\n}\n\n// StartBackground launches the background goroutine that ticks every 5 minutes\n// to sync governance data for all connected tunnels. 
Idempotent: a second call\n// while already running is a no-op.\nfunc (s *Syncer) StartBackground() {\n\ts.mu.Lock()\n\tif s.running {\n\t\ts.mu.Unlock()\n\t\treturn\n\t}\n\ts.stopCh = make(chan struct{})\n\ts.running = true\n\tstopCh := s.stopCh\n\ts.mu.Unlock()\n\n\tgo func() {\n\t\tslog.Info(\"Governance syncer started\", \"interval\", syncTickInterval)\n\n\t\tif connections, err := s.db.GetConnections(); err == nil {\n\t\t\ts.pruneRetention(connections)\n\t\t}\n\n\t\tticker := time.NewTicker(syncTickInterval)\n\t\tdefer ticker.Stop()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-stopCh:\n\t\t\t\tslog.Info(\"Governance syncer stopped\")\n\t\t\t\treturn\n\t\t\tcase <-ticker.C:\n\t\t\t\ts.backgroundTick()\n\t\t\t}\n\t\t}\n\t}()\n}\n\n// Stop signals the background goroutine to stop. Safe to call when the syncer\n// is not running — a no-op in that case.\nfunc (s *Syncer) Stop() {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tif !s.running {\n\t\treturn\n\t}\n\tclose(s.stopCh)\n\ts.running = false\n}\n\n// IsRunning reports whether the background goroutine is currently active.\nfunc (s *Syncer) IsRunning() bool {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\treturn s.running\n}\n\n// SyncConnection runs all three governance sync phases (metadata, querylog, access)\n// for a single connection. 
It prevents concurrent syncs per connection.\nfunc (s *Syncer) SyncConnection(ctx context.Context, creds CHCredentials) (*SyncResult, error) {\n\t// Prevent concurrent syncs for the same connection\n\tif _, loaded := s.activeSyncs.LoadOrStore(creds.ConnectionID, true); loaded {\n\t\treturn nil, fmt.Errorf(\"sync already in progress for connection %s\", creds.ConnectionID)\n\t}\n\tdefer s.activeSyncs.Delete(creds.ConnectionID)\n\n\tresult := &SyncResult{}\n\n\t// Phase 1: Metadata\n\tmetaResult, err := s.syncMetadata(ctx, creds)\n\tif err != nil {\n\t\tresult.MetadataError = err.Error()\n\t\tslog.Error(\"Metadata sync failed\", \"connection\", creds.ConnectionID, \"error\", err)\n\t} else {\n\t\tresult.MetadataResult = metaResult\n\t}\n\n\t// Phase 2: Query log\n\tqlResult, err := s.syncQueryLog(ctx, creds)\n\tif err != nil {\n\t\tresult.QueryLogError = err.Error()\n\t\tslog.Error(\"Query log sync failed\", \"connection\", creds.ConnectionID, \"error\", err)\n\t} else {\n\t\tresult.QueryLogResult = qlResult\n\t}\n\n\t// Phase 3: Access\n\taccessResult, err := s.syncAccess(ctx, creds)\n\tif err != nil {\n\t\tresult.AccessError = err.Error()\n\t\tslog.Error(\"Access sync failed\", \"connection\", creds.ConnectionID, \"error\", err)\n\t} else {\n\t\tresult.AccessResult = accessResult\n\t}\n\n\treturn result, nil\n}\n\n// SyncSingle runs a single sync phase for a connection.\nfunc (s *Syncer) SyncSingle(ctx context.Context, creds CHCredentials, syncType SyncType) error {\n\tswitch syncType {\n\tcase SyncMetadata:\n\t\t_, err := s.syncMetadata(ctx, creds)\n\t\treturn err\n\tcase SyncQueryLog:\n\t\t_, err := s.syncQueryLog(ctx, creds)\n\t\treturn err\n\tcase SyncAccess:\n\t\t_, err := s.syncAccess(ctx, creds)\n\t\treturn err\n\tdefault:\n\t\treturn fmt.Errorf(\"unknown sync type: %s\", syncType)\n\t}\n}\n\n// backgroundTick iterates over all connections, checks tunnel status and\n// sync staleness, borrows credentials from active sessions, and triggers\n// SyncConnection 
in goroutines.\nfunc (s *Syncer) backgroundTick() {\n\tconnections, err := s.db.GetConnections()\n\tif err != nil {\n\t\tslog.Error(\"Governance sync: failed to load connections\", \"error\", err)\n\t\treturn\n\t}\n\n\ts.pruneRetention(connections)\n\n\tvar wg sync.WaitGroup\n\tfor _, conn := range connections {\n\t\tconnID := conn.ID\n\n\t\t// Skip if tunnel is offline\n\t\tif !s.gateway.IsTunnelOnline(connID) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Skip if a sync is already running for this connection\n\t\tif _, loaded := s.activeSyncs.Load(connID); loaded {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Check if any sync type is stale\n\t\tif !s.isSyncStale(connID) {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Borrow credentials from an active session\n\t\tcreds, err := s.findCredentials(connID)\n\t\tif err != nil {\n\t\t\tslog.Debug(\"Governance sync: no credentials for connection\",\n\t\t\t\t\"connection\", connID, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\twg.Add(1)\n\t\tgo func(c CHCredentials) {\n\t\t\tdefer wg.Done()\n\t\t\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)\n\t\t\tdefer cancel()\n\n\t\t\tresult, err := s.SyncConnection(ctx, c)\n\t\t\tif err != nil {\n\t\t\t\tslog.Error(\"Governance background sync failed\",\n\t\t\t\t\t\"connection\", c.ConnectionID, \"error\", err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tslog.Info(\"Governance background sync completed\",\n\t\t\t\t\"connection\", c.ConnectionID,\n\t\t\t\t\"metadata\", result.MetadataResult != nil,\n\t\t\t\t\"querylog\", result.QueryLogResult != nil,\n\t\t\t\t\"access\", result.AccessResult != nil,\n\t\t\t)\n\t\t}(creds)\n\t}\n\n\twg.Wait()\n}\n\n// pruneRetention deletes query log and violation rows older than retentionDays\n// for every known connection, bounding SQLite growth on busy ClickHouse instances.\nfunc (s *Syncer) pruneRetention(connections []database.Connection) {\n\tcutoff := time.Now().UTC().AddDate(0, 0, -retentionDays).Format(\"2006-01-02 15:04:05\")\n\tfor _, conn := range connections 
{\n\t\tif n, err := s.store.CleanupOldQueryLogs(conn.ID, cutoff); err != nil {\n\t\t\tslog.Warn(\"Governance retention prune (query_log) failed\",\n\t\t\t\t\"connection\", conn.ID, \"error\", err)\n\t\t} else if n > 0 {\n\t\t\tslog.Info(\"Governance retention pruned query_log\",\n\t\t\t\t\"connection\", conn.ID, \"rows\", n, \"older_than\", cutoff)\n\t\t}\n\t\tif n, err := s.store.CleanupOldViolations(conn.ID, cutoff); err != nil {\n\t\t\tslog.Warn(\"Governance retention prune (violations) failed\",\n\t\t\t\t\"connection\", conn.ID, \"error\", err)\n\t\t} else if n > 0 {\n\t\t\tslog.Info(\"Governance retention pruned violations\",\n\t\t\t\t\"connection\", conn.ID, \"rows\", n, \"older_than\", cutoff)\n\t\t}\n\t}\n}\n\n// isSyncStale returns true if any sync type for the connection is older than staleDuration.\nfunc (s *Syncer) isSyncStale(connectionID string) bool {\n\tsyncTypes := []SyncType{SyncMetadata, SyncQueryLog, SyncAccess}\n\tfor _, st := range syncTypes {\n\t\tstate, err := s.store.GetSyncState(connectionID, string(st))\n\t\tif err != nil || state == nil {\n\t\t\treturn true // no state yet → needs sync\n\t\t}\n\t\tif state.LastSyncedAt == nil {\n\t\t\treturn true\n\t\t}\n\t\tlastSync, err := time.Parse(time.RFC3339, *state.LastSyncedAt)\n\t\tif err != nil {\n\t\t\treturn true\n\t\t}\n\t\tif time.Since(lastSync) > staleDuration {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\n// findCredentials borrows credentials from an active session for the given connection.\n// It tries up to 3 recent sessions and returns the first one with a valid password.\nfunc (s *Syncer) findCredentials(connectionID string) (CHCredentials, error) {\n\tsessions, err := s.db.GetActiveSessionsByConnection(connectionID, 3)\n\tif err != nil {\n\t\treturn CHCredentials{}, fmt.Errorf(\"failed to load sessions: %w\", err)\n\t}\n\n\tfor _, sess := range sessions {\n\t\tpassword, err := crypto.Decrypt(sess.EncryptedPassword, s.secret)\n\t\tif err != nil 
{\n\t\t\tcontinue\n\t\t}\n\t\ts.auditCredentialBorrow(connectionID, sess)\n\t\treturn CHCredentials{\n\t\t\tConnectionID: connectionID,\n\t\t\tUser:         sess.ClickhouseUser,\n\t\t\tPassword:     password,\n\t\t}, nil\n\t}\n\n\treturn CHCredentials{}, fmt.Errorf(\"no active sessions with valid credentials for connection %s\", connectionID)\n}\n\n// auditCredentialBorrow writes one audit row per connection per hour when the\n// background syncer borrows credentials from an active session. A structured\n// debug log is emitted every time; the audit table only gets rate-limited\n// entries to avoid flooding it during frequent ticks.\nfunc (s *Syncer) auditCredentialBorrow(connectionID string, sess database.Session) {\n\tslog.Debug(\"Governance syncer borrowed session credentials\",\n\t\t\"connection\", connectionID, \"ch_user\", sess.ClickhouseUser, \"session_id\", sess.ID)\n\n\tnow := time.Now()\n\tif last, ok := s.lastBorrowLog.Load(connectionID); ok {\n\t\tif t, ok := last.(time.Time); ok && now.Sub(t) < time.Hour {\n\t\t\treturn\n\t\t}\n\t}\n\ts.lastBorrowLog.Store(connectionID, now)\n\n\tdetails := fmt.Sprintf(`{\"session_id\":%q,\"purpose\":\"background_sync\"}`, sess.ID)\n\tconnID := connectionID\n\tuser := sess.ClickhouseUser\n\tif err := s.db.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.credential_borrow\",\n\t\tUsername:     &user,\n\t\tConnectionID: &connID,\n\t\tDetails:      &details,\n\t}); err != nil {\n\t\tslog.Warn(\"Failed to write credential borrow audit log\",\n\t\t\t\"connection\", connectionID, \"error\", err)\n\t}\n}\n\n// executeQuery sends a SQL query through the tunnel and returns parsed rows.\n// The ClickHouse JSON format returns data as an array of objects:\n//\n//\t{\"data\": [{\"col1\": \"val1\", \"col2\": \"val2\"}, ...], \"meta\": [...], ...}\n//\n// The tunnel's QueryResult.Data contains this \"data\" array as json.RawMessage.\n// We first try to unmarshal as []map[string]interface{} (JSON format).\n// If 
that fails we fall back to [][]interface{} (JSONCompact) and combine with meta.\nfunc (s *Syncer) executeQuery(creds CHCredentials, sql string) ([]map[string]interface{}, error) {\n\tresult, err := s.gateway.ExecuteQuery(creds.ConnectionID, sql, creds.User, creds.Password, queryTimeout)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"query execution failed: %w\", err)\n\t}\n\tif result == nil || len(result.Data) == 0 {\n\t\treturn nil, nil\n\t}\n\n\t// Try JSON format: array of objects\n\tvar rows []map[string]interface{}\n\tif err := json.Unmarshal(result.Data, &rows); err == nil {\n\t\treturn rows, nil\n\t}\n\n\t// Fallback: JSONCompact format — array of arrays + meta\n\tvar arrays [][]interface{}\n\tif err := json.Unmarshal(result.Data, &arrays); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse query result data: %w\", err)\n\t}\n\n\t// Parse meta for column names\n\ttype metaCol struct {\n\t\tName string `json:\"name\"`\n\t\tType string `json:\"type\"`\n\t}\n\tvar meta []metaCol\n\tif result.Meta != nil {\n\t\tif err := json.Unmarshal(result.Meta, &meta); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to parse query result meta: %w\", err)\n\t\t}\n\t}\n\n\tif len(meta) == 0 && len(arrays) > 0 {\n\t\t// Generate placeholder column names\n\t\tfor i := range arrays[0] {\n\t\t\tmeta = append(meta, metaCol{Name: fmt.Sprintf(\"col%d\", i), Type: \"String\"})\n\t\t}\n\t}\n\n\trows = make([]map[string]interface{}, 0, len(arrays))\n\tfor _, row := range arrays {\n\t\tm := make(map[string]interface{}, len(meta))\n\t\tfor i, col := range meta {\n\t\t\tif i < len(row) {\n\t\t\t\tm[col.Name] = row[i]\n\t\t\t}\n\t\t}\n\t\trows = append(rows, m)\n\t}\n\n\treturn rows, nil\n}\n"
  },
  {
    "path": "internal/governance/types.go",
    "content": "package governance\n\n// ── Sensitivity tag constants ────────────────────────────────────────────────\n\ntype SensitivityTag string\n\nconst (\n\tTagPII       SensitivityTag = \"PII\"\n\tTagFinancial SensitivityTag = \"FINANCIAL\"\n\tTagInternal  SensitivityTag = \"INTERNAL\"\n\tTagPublic    SensitivityTag = \"PUBLIC\"\n\tTagCritical  SensitivityTag = \"CRITICAL\"\n)\n\nvar ValidTags = map[SensitivityTag]bool{\n\tTagPII: true, TagFinancial: true, TagInternal: true,\n\tTagPublic: true, TagCritical: true,\n}\n\n// ── Sync types ───────────────────────────────────────────────────────────────\n\ntype SyncType string\n\nconst (\n\tSyncMetadata SyncType = \"metadata\"\n\tSyncQueryLog SyncType = \"query_log\"\n\tSyncAccess   SyncType = \"access\"\n)\n\n// ── Schema change types ──────────────────────────────────────────────────────\n\ntype SchemaChangeType string\n\nconst (\n\tChangeDatabaseAdded     SchemaChangeType = \"database_added\"\n\tChangeDatabaseRemoved   SchemaChangeType = \"database_removed\"\n\tChangeTableAdded        SchemaChangeType = \"table_added\"\n\tChangeTableRemoved      SchemaChangeType = \"table_removed\"\n\tChangeColumnAdded       SchemaChangeType = \"column_added\"\n\tChangeColumnRemoved     SchemaChangeType = \"column_removed\"\n\tChangeColumnTypeChanged SchemaChangeType = \"column_type_changed\"\n)\n\n// ── Edge types ───────────────────────────────────────────────────────────────\n\ntype EdgeType string\n\nconst (\n\tEdgeSelectFrom     EdgeType = \"select_from\"\n\tEdgeInsertSelect   EdgeType = \"insert_select\"\n\tEdgeCreateAsSelect EdgeType = \"create_as_select\"\n)\n\n// ── Model structs ────────────────────────────────────────────────────────────\n\ntype SyncState struct {\n\tID           string  `json:\"id\"`\n\tConnectionID string  `json:\"connection_id\"`\n\tSyncType     string  `json:\"sync_type\"`\n\tLastSyncedAt *string `json:\"last_synced_at\"`\n\tWatermark    *string `json:\"watermark\"`\n\tStatus       string  
`json:\"status\"`\n\tLastError    *string `json:\"last_error\"`\n\tRowCount     int     `json:\"row_count\"`\n\tCreatedAt    string  `json:\"created_at\"`\n\tUpdatedAt    string  `json:\"updated_at\"`\n}\n\ntype GovDatabase struct {\n\tID           string `json:\"id\"`\n\tConnectionID string `json:\"connection_id\"`\n\tName         string `json:\"name\"`\n\tEngine       string `json:\"engine\"`\n\tFirstSeen    string `json:\"first_seen\"`\n\tLastUpdated  string `json:\"last_updated\"`\n\tIsDeleted    bool   `json:\"is_deleted\"`\n}\n\ntype GovTable struct {\n\tID             string   `json:\"id\"`\n\tConnectionID   string   `json:\"connection_id\"`\n\tDatabaseName   string   `json:\"database_name\"`\n\tTableName      string   `json:\"table_name\"`\n\tEngine         string   `json:\"engine\"`\n\tTableUUID      string   `json:\"table_uuid\"`\n\tTotalRows      int64    `json:\"total_rows\"`\n\tTotalBytes     int64    `json:\"total_bytes\"`\n\tPartitionCount int      `json:\"partition_count\"`\n\tFirstSeen      string   `json:\"first_seen\"`\n\tLastUpdated    string   `json:\"last_updated\"`\n\tIsDeleted      bool     `json:\"is_deleted\"`\n\tTags           []string `json:\"tags,omitempty\"`\n}\n\ntype GovColumn struct {\n\tID                string   `json:\"id\"`\n\tConnectionID      string   `json:\"connection_id\"`\n\tDatabaseName      string   `json:\"database_name\"`\n\tTableName         string   `json:\"table_name\"`\n\tColumnName        string   `json:\"column_name\"`\n\tColumnType        string   `json:\"column_type\"`\n\tColumnPosition    int      `json:\"column_position\"`\n\tDefaultKind       *string  `json:\"default_kind\"`\n\tDefaultExpression *string  `json:\"default_expression\"`\n\tComment           *string  `json:\"comment\"`\n\tFirstSeen         string   `json:\"first_seen\"`\n\tLastUpdated       string   `json:\"last_updated\"`\n\tIsDeleted         bool     `json:\"is_deleted\"`\n\tTags              []string `json:\"tags,omitempty\"`\n}\n\ntype 
SchemaChange struct {\n\tID           string `json:\"id\"`\n\tConnectionID string `json:\"connection_id\"`\n\tChangeType   string `json:\"change_type\"`\n\tDatabaseName string `json:\"database_name\"`\n\tTableName    string `json:\"table_name\"`\n\tColumnName   string `json:\"column_name\"`\n\tOldValue     string `json:\"old_value\"`\n\tNewValue     string `json:\"new_value\"`\n\tDetectedAt   string `json:\"detected_at\"`\n\tCreatedAt    string `json:\"created_at\"`\n}\n\ntype QueryLogEntry struct {\n\tID             string  `json:\"id\"`\n\tConnectionID   string  `json:\"connection_id\"`\n\tQueryID        string  `json:\"query_id\"`\n\tUser           string  `json:\"ch_user\"`\n\tQueryText      string  `json:\"query_text\"`\n\tNormalizedHash string  `json:\"normalized_hash\"`\n\tQueryKind      string  `json:\"query_kind\"`\n\tEventTime      string  `json:\"event_time\"`\n\tDurationMs     int64   `json:\"duration_ms\"`\n\tReadRows       int64   `json:\"read_rows\"`\n\tReadBytes      int64   `json:\"read_bytes\"`\n\tResultRows     int64   `json:\"result_rows\"`\n\tWrittenRows    int64   `json:\"written_rows\"`\n\tWrittenBytes   int64   `json:\"written_bytes\"`\n\tMemoryUsage    int64   `json:\"memory_usage\"`\n\tTablesUsed     string  `json:\"tables_used\"`\n\tIsError        bool    `json:\"is_error\"`\n\tErrorMessage   *string `json:\"error_message\"`\n\tCreatedAt      string  `json:\"created_at\"`\n}\n\ntype LineageEdge struct {\n\tID             string              `json:\"id\"`\n\tConnectionID   string              `json:\"connection_id\"`\n\tSourceDatabase string              `json:\"source_database\"`\n\tSourceTable    string              `json:\"source_table\"`\n\tTargetDatabase string              `json:\"target_database\"`\n\tTargetTable    string              `json:\"target_table\"`\n\tQueryID        string              `json:\"query_id\"`\n\tUser           string              `json:\"ch_user\"`\n\tEdgeType       string              
`json:\"edge_type\"`\n\tDetectedAt     string              `json:\"detected_at\"`\n\tColumnEdges    []ColumnLineageEdge `json:\"column_edges,omitempty\"`\n}\n\ntype ColumnLineageEdge struct {\n\tID            string `json:\"id\"`\n\tLineageEdgeID string `json:\"lineage_edge_id\"`\n\tConnectionID  string `json:\"connection_id\"`\n\tSourceColumn  string `json:\"source_column\"`\n\tTargetColumn  string `json:\"target_column\"`\n}\n\ntype TagEntry struct {\n\tID           string `json:\"id\"`\n\tConnectionID string `json:\"connection_id\"`\n\tObjectType   string `json:\"object_type\"`\n\tDatabaseName string `json:\"database_name\"`\n\tTableName    string `json:\"table_name\"`\n\tColumnName   string `json:\"column_name\"`\n\tTag          string `json:\"tag\"`\n\tTaggedBy     string `json:\"tagged_by\"`\n\tCreatedAt    string `json:\"created_at\"`\n}\n\ntype ChUser struct {\n\tID           string  `json:\"id\"`\n\tConnectionID string  `json:\"connection_id\"`\n\tName         string  `json:\"name\"`\n\tAuthType     *string `json:\"auth_type\"`\n\tHostIP       *string `json:\"host_ip\"`\n\tDefaultRoles *string `json:\"default_roles\"`\n\tFirstSeen    string  `json:\"first_seen\"`\n\tLastUpdated  string  `json:\"last_updated\"`\n}\n\ntype ChRole struct {\n\tID           string `json:\"id\"`\n\tConnectionID string `json:\"connection_id\"`\n\tName         string `json:\"name\"`\n\tFirstSeen    string `json:\"first_seen\"`\n\tLastUpdated  string `json:\"last_updated\"`\n}\n\ntype RoleGrant struct {\n\tID              string `json:\"id\"`\n\tConnectionID    string `json:\"connection_id\"`\n\tUserName        string `json:\"user_name\"`\n\tGrantedRoleName string `json:\"granted_role_name\"`\n\tIsDefault       bool   `json:\"is_default\"`\n\tWithAdminOption bool   `json:\"with_admin_option\"`\n\tFirstSeen       string `json:\"first_seen\"`\n\tLastUpdated     string `json:\"last_updated\"`\n}\n\ntype Grant struct {\n\tID              string  `json:\"id\"`\n\tConnectionID    string  
`json:\"connection_id\"`\n\tUserName        *string `json:\"user_name\"`\n\tRoleName        *string `json:\"role_name\"`\n\tAccessType      string  `json:\"access_type\"`\n\tGrantDatabase   *string `json:\"grant_database\"`\n\tGrantTable      *string `json:\"grant_table\"`\n\tGrantColumn     *string `json:\"grant_column\"`\n\tIsPartialRevoke bool    `json:\"is_partial_revoke\"`\n\tGrantOption     bool    `json:\"grant_option\"`\n\tFirstSeen       string  `json:\"first_seen\"`\n\tLastUpdated     string  `json:\"last_updated\"`\n}\n\ntype AccessMatrixEntry struct {\n\tID            string  `json:\"id\"`\n\tConnectionID  string  `json:\"connection_id\"`\n\tUserName      string  `json:\"user_name\"`\n\tRoleName      *string `json:\"role_name\"`\n\tDatabaseName  *string `json:\"database_name\"`\n\tTableName     *string `json:\"table_name\"`\n\tPrivilege     string  `json:\"privilege\"`\n\tIsDirectGrant bool    `json:\"is_direct_grant\"`\n\tLastQueryTime *string `json:\"last_query_time\"`\n}\n\ntype Policy struct {\n\tID              string  `json:\"id\"`\n\tConnectionID    string  `json:\"connection_id\"`\n\tName            string  `json:\"name\"`\n\tDescription     *string `json:\"description\"`\n\tObjectType      string  `json:\"object_type\"`\n\tObjectDatabase  *string `json:\"object_database\"`\n\tObjectTable     *string `json:\"object_table\"`\n\tObjectColumn    *string `json:\"object_column\"`\n\tRequiredRole    string  `json:\"required_role\"`\n\tSeverity        string  `json:\"severity\"`\n\tEnforcementMode string  `json:\"enforcement_mode\"`\n\tEnabled         bool    `json:\"enabled\"`\n\tCreatedBy       *string `json:\"created_by\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\ntype PolicyViolation struct {\n\tID              string  `json:\"id\"`\n\tConnectionID    string  `json:\"connection_id\"`\n\tPolicyID        string  `json:\"policy_id\"`\n\tQueryLogID      string  
`json:\"query_log_id\"`\n\tUser            string  `json:\"ch_user\"`\n\tViolationDetail string  `json:\"violation_detail\"`\n\tSeverity        string  `json:\"severity\"`\n\tDetectionPhase  string  `json:\"detection_phase\"`\n\tRequestEndpoint *string `json:\"request_endpoint\"`\n\tDetectedAt      string  `json:\"detected_at\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\t// Joined fields\n\tPolicyName string `json:\"policy_name,omitempty\"`\n}\n\ntype ObjectComment struct {\n\tID           string  `json:\"id\"`\n\tConnectionID string  `json:\"connection_id\"`\n\tObjectType   string  `json:\"object_type\"`\n\tDatabaseName string  `json:\"database_name\"`\n\tTableName    string  `json:\"table_name\"`\n\tColumnName   string  `json:\"column_name\"`\n\tCommentText  string  `json:\"comment_text\"`\n\tCreatedBy    *string `json:\"created_by\"`\n\tCreatedAt    string  `json:\"created_at\"`\n\tUpdatedAt    string  `json:\"updated_at\"`\n}\n\ntype Incident struct {\n\tID              string  `json:\"id\"`\n\tConnectionID    string  `json:\"connection_id\"`\n\tSourceType      string  `json:\"source_type\"`\n\tSourceRef       *string `json:\"source_ref\"`\n\tDedupeKey       *string `json:\"dedupe_key\"`\n\tTitle           string  `json:\"title\"`\n\tSeverity        string  `json:\"severity\"`\n\tStatus          string  `json:\"status\"`\n\tAssignee        *string `json:\"assignee\"`\n\tDetails         *string `json:\"details\"`\n\tResolutionNote  *string `json:\"resolution_note\"`\n\tOccurrenceCount int     `json:\"occurrence_count\"`\n\tFirstSeenAt     string  `json:\"first_seen_at\"`\n\tLastSeenAt      string  `json:\"last_seen_at\"`\n\tResolvedAt      *string `json:\"resolved_at\"`\n\tCreatedBy       *string `json:\"created_by\"`\n\tCreatedAt       string  `json:\"created_at\"`\n\tUpdatedAt       string  `json:\"updated_at\"`\n}\n\ntype IncidentComment struct {\n\tID          string  `json:\"id\"`\n\tIncidentID  string  `json:\"incident_id\"`\n\tCommentText string  
`json:\"comment_text\"`\n\tCreatedBy   *string `json:\"created_by\"`\n\tCreatedAt   string  `json:\"created_at\"`\n}\n\n// ── Summary types ────────────────────────────────────────────────────────────\n\ntype GovernanceOverview struct {\n\tDatabaseCount     int               `json:\"database_count\"`\n\tTableCount        int               `json:\"table_count\"`\n\tColumnCount       int               `json:\"column_count\"`\n\tTaggedTableCount  int               `json:\"tagged_table_count\"`\n\tUserCount         int               `json:\"user_count\"`\n\tRoleCount         int               `json:\"role_count\"`\n\tQueryCount24h     int               `json:\"query_count_24h\"`\n\tLineageEdgeCount  int               `json:\"lineage_edge_count\"`\n\tPolicyCount       int               `json:\"policy_count\"`\n\tViolationCount    int               `json:\"violation_count\"`\n\tIncidentCount     int               `json:\"incident_count\"`\n\tSchemaChangeCount int               `json:\"schema_change_count\"`\n\tSyncStates        []SyncState       `json:\"sync_states\"`\n\tRecentChanges     []SchemaChange    `json:\"recent_changes\"`\n\tRecentViolations  []PolicyViolation `json:\"recent_violations\"`\n}\n\ntype OverPermission struct {\n\tUserName       string  `json:\"user_name\"`\n\tRoleName       *string `json:\"role_name\"`\n\tDatabaseName   *string `json:\"database_name\"`\n\tTableName      *string `json:\"table_name\"`\n\tPrivilege      string  `json:\"privilege\"`\n\tLastQueryTime  *string `json:\"last_query_time\"`\n\tDaysSinceQuery *int    `json:\"days_since_query\"`\n\tReason         string  `json:\"reason\"`\n}\n\n// ── Sync result types ────────────────────────────────────────────────────────\n\ntype SyncResult struct {\n\tMetadataResult *MetadataSyncResult `json:\"metadata,omitempty\"`\n\tMetadataError  string              `json:\"metadata_error,omitempty\"`\n\tQueryLogResult *QueryLogSyncResult `json:\"query_log,omitempty\"`\n\tQueryLogError  string            
  `json:\"query_log_error,omitempty\"`\n\tAccessResult   *AccessSyncResult   `json:\"access,omitempty\"`\n\tAccessError    string              `json:\"access_error,omitempty\"`\n}\n\ntype MetadataSyncResult struct {\n\tDatabasesSynced int `json:\"databases_synced\"`\n\tTablesSynced    int `json:\"tables_synced\"`\n\tColumnsSynced   int `json:\"columns_synced\"`\n\tSchemaChanges   int `json:\"schema_changes\"`\n}\n\ntype QueryLogSyncResult struct {\n\tQueriesIngested   int    `json:\"queries_ingested\"`\n\tLineageEdgesFound int    `json:\"lineage_edges_found\"`\n\tViolationsFound   int    `json:\"violations_found\"`\n\tNewWatermark      string `json:\"new_watermark\"`\n}\n\ntype AccessSyncResult struct {\n\tUsersSynced     int `json:\"users_synced\"`\n\tRolesSynced     int `json:\"roles_synced\"`\n\tGrantsSynced    int `json:\"grants_synced\"`\n\tMatrixEntries   int `json:\"matrix_entries\"`\n\tOverPermissions int `json:\"over_permissions\"`\n}\n\n// ── Credentials holder ───────────────────────────────────────────────────────\n\ntype CHCredentials struct {\n\tConnectionID string\n\tUser         string\n\tPassword     string\n}\n\n// ── Lineage graph (for API response) ─────────────────────────────────────────\n\ntype LineageNode struct {\n\tID       string      `json:\"id\"`\n\tDatabase string      `json:\"database\"`\n\tTable    string      `json:\"table\"`\n\tType     string      `json:\"type\"` // \"source\", \"target\", \"current\"\n\tColumns  []GovColumn `json:\"columns,omitempty\"`\n}\n\ntype LineageGraph struct {\n\tNodes []LineageNode `json:\"nodes\"`\n\tEdges []LineageEdge `json:\"edges\"`\n}\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunc StrPtr(s string) *string {\n\tif s == \"\" {\n\t\treturn nil\n\t}\n\treturn &s\n}\n"
  },
  {
    "path": "internal/langfuse/langfuse.go",
    "content": "package langfuse\n\nimport (\n\t\"bytes\"\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n)\n\n// Config holds Langfuse connection settings.\ntype Config struct {\n\tPublicKey string\n\tSecretKey string\n\tBaseURL   string\n}\n\n// Enabled returns true when both keys are set.\nfunc (c Config) Enabled() bool {\n\treturn c.PublicKey != \"\" && c.SecretKey != \"\"\n}\n\n// NormalizeBaseURL ensures BaseURL has a sensible default and no trailing slash.\nfunc (c *Config) NormalizeBaseURL() {\n\tc.BaseURL = strings.TrimRight(strings.TrimSpace(c.BaseURL), \"/\")\n\tif c.BaseURL == \"\" {\n\t\tc.BaseURL = \"https://cloud.langfuse.com\"\n\t}\n}\n\n// Usage holds token counts for a generation.\ntype Usage struct {\n\tInput  int `json:\"input\"`\n\tOutput int `json:\"output\"`\n\tTotal  int `json:\"total\"`\n}\n\n// TraceParams captures trace-level data.\ntype TraceParams struct {\n\tID        string\n\tName      string\n\tUserID    string\n\tSessionID string\n\tInput     interface{}\n\tOutput    interface{}\n\tRelease   string\n\tMetadata  map[string]string\n\tTags      []string\n}\n\n// GenerationParams captures one LLM generation.\ntype GenerationParams struct {\n\tID              string\n\tTraceID         string\n\tName            string\n\tModel           string\n\tModelParameters map[string]interface{}\n\tInput           interface{}\n\tOutput          interface{}\n\tStartTime       time.Time\n\tEndTime         time.Time\n\tUsage           *Usage\n\tLevel           string // \"DEFAULT\" or \"ERROR\"\n}\n\n// ScoreParams captures a score attached to a trace.\ntype ScoreParams struct {\n\tTraceID  string\n\tName     string\n\tValue    float64\n\tComment  string\n\tDataType string // \"NUMERIC\" or \"BOOLEAN\"\n}\n\n// EventParams captures a point-in-time event within a trace.\ntype EventParams struct {\n\tTraceID string\n\tName    string\n\tInput   interface{}\n\tLevel   
string\n}\n\ntype event struct {\n\tID        string      `json:\"id\"`\n\tType      string      `json:\"type\"`\n\tTimestamp string      `json:\"timestamp\"`\n\tBody      interface{} `json:\"body\"`\n}\n\n// Client sends observability events to Langfuse asynchronously.\n// Always non-nil — inactive when config is not enabled.\ntype Client struct {\n\tmu     sync.RWMutex\n\tcfg    Config\n\thttp   *http.Client\n\tevents chan event\n\tstopCh chan struct{}\n\twg     sync.WaitGroup\n}\n\n// New creates a Client. Always returns a valid pointer.\n// The client is inactive until Reconfigure is called with valid credentials.\nfunc New() *Client {\n\treturn &Client{\n\t\thttp:   &http.Client{Timeout: 10 * time.Second},\n\t\tevents: make(chan event, 256),\n\t\tstopCh: make(chan struct{}),\n\t}\n}\n\n// Reconfigure swaps the config at runtime. Safe to call while the client is running.\nfunc (c *Client) Reconfigure(cfg Config) {\n\tcfg.NormalizeBaseURL()\n\tc.mu.Lock()\n\tc.cfg = cfg\n\tc.mu.Unlock()\n\tif cfg.Enabled() {\n\t\tslog.Info(\"Langfuse observability configured\", \"base_url\", cfg.BaseURL)\n\t} else {\n\t\tslog.Info(\"Langfuse observability disabled\")\n\t}\n}\n\n// IsEnabled returns true if the client has valid credentials.\nfunc (c *Client) IsEnabled() bool {\n\tc.mu.RLock()\n\tdefer c.mu.RUnlock()\n\treturn c.cfg.Enabled()\n}\n\nfunc (c *Client) getConfig() Config {\n\tc.mu.RLock()\n\tdefer c.mu.RUnlock()\n\treturn c.cfg\n}\n\n// Start spawns the background flush goroutine.\nfunc (c *Client) Start() {\n\tc.wg.Add(1)\n\tgo c.loop()\n}\n\n// Stop drains pending events and shuts down.\nfunc (c *Client) Stop() {\n\tclose(c.stopCh)\n\tc.wg.Wait()\n\tslog.Info(\"Langfuse client stopped\")\n}\n\nfunc (c *Client) loop() {\n\tdefer c.wg.Done()\n\tticker := time.NewTicker(5 * time.Second)\n\tdefer ticker.Stop()\n\n\tvar buf []event\n\tfor {\n\t\tselect {\n\t\tcase e := <-c.events:\n\t\t\tbuf = append(buf, e)\n\t\t\tif len(buf) >= 10 {\n\t\t\t\tc.flush(buf)\n\t\t\t\tbuf = 
buf[:0]\n\t\t\t}\n\t\tcase <-ticker.C:\n\t\t\tif len(buf) > 0 {\n\t\t\t\tc.flush(buf)\n\t\t\t\tbuf = buf[:0]\n\t\t\t}\n\t\tcase <-c.stopCh:\n\t\t\tfor {\n\t\t\t\tselect {\n\t\t\t\tcase e := <-c.events:\n\t\t\t\t\tbuf = append(buf, e)\n\t\t\t\tdefault:\n\t\t\t\t\tc.flush(buf)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc (c *Client) enqueue(e event) {\n\tif !c.IsEnabled() {\n\t\treturn\n\t}\n\tselect {\n\tcase c.events <- e:\n\tdefault:\n\t\tslog.Warn(\"langfuse event dropped, channel full\")\n\t}\n}\n\nfunc now() string {\n\treturn time.Now().UTC().Format(time.RFC3339Nano)\n}\n\nfunc newID() string {\n\treturn uuid.NewString()\n}\n\n// LogTrace enqueues a trace-create event.\nfunc (c *Client) LogTrace(p TraceParams) {\n\tid := p.ID\n\tif id == \"\" {\n\t\tid = newID()\n\t}\n\tbody := map[string]interface{}{\n\t\t\"id\":   id,\n\t\t\"name\": p.Name,\n\t}\n\tif p.UserID != \"\" {\n\t\tbody[\"userId\"] = p.UserID\n\t}\n\tif p.SessionID != \"\" {\n\t\tbody[\"sessionId\"] = p.SessionID\n\t}\n\tif p.Input != nil {\n\t\tbody[\"input\"] = p.Input\n\t}\n\tif p.Output != nil {\n\t\tbody[\"output\"] = p.Output\n\t}\n\tif p.Release != \"\" {\n\t\tbody[\"release\"] = p.Release\n\t}\n\tif len(p.Metadata) > 0 {\n\t\tbody[\"metadata\"] = p.Metadata\n\t}\n\tif len(p.Tags) > 0 {\n\t\tbody[\"tags\"] = p.Tags\n\t}\n\n\tc.enqueue(event{\n\t\tID:        id,\n\t\tType:      \"trace-create\",\n\t\tTimestamp: now(),\n\t\tBody:      body,\n\t})\n}\n\n// LogGeneration enqueues a generation-create event.\nfunc (c *Client) LogGeneration(p GenerationParams) {\n\tid := p.ID\n\tif id == \"\" {\n\t\tid = newID()\n\t}\n\n\tbody := map[string]interface{}{\n\t\t\"id\":        id,\n\t\t\"traceId\":   p.TraceID,\n\t\t\"name\":      p.Name,\n\t\t\"type\":      \"GENERATION\",\n\t\t\"model\":     p.Model,\n\t\t\"startTime\": p.StartTime.UTC().Format(time.RFC3339Nano),\n\t\t\"endTime\":   p.EndTime.UTC().Format(time.RFC3339Nano),\n\t}\n\tif len(p.ModelParameters) > 0 
{\n\t\tbody[\"modelParameters\"] = p.ModelParameters\n\t}\n\tif p.Input != nil {\n\t\tbody[\"input\"] = p.Input\n\t}\n\tif p.Output != nil {\n\t\tbody[\"output\"] = p.Output\n\t}\n\tif p.Usage != nil {\n\t\tbody[\"usage\"] = p.Usage\n\t}\n\tif p.Level != \"\" {\n\t\tbody[\"level\"] = p.Level\n\t}\n\n\tc.enqueue(event{\n\t\tID:        id,\n\t\tType:      \"generation-create\",\n\t\tTimestamp: now(),\n\t\tBody:      body,\n\t})\n}\n\n// LogScore enqueues a score-create event.\nfunc (c *Client) LogScore(p ScoreParams) {\n\tdataType := p.DataType\n\tif dataType == \"\" {\n\t\tdataType = \"NUMERIC\"\n\t}\n\tbody := map[string]interface{}{\n\t\t\"traceId\":  p.TraceID,\n\t\t\"name\":     p.Name,\n\t\t\"value\":    p.Value,\n\t\t\"dataType\": dataType,\n\t}\n\tif p.Comment != \"\" {\n\t\tbody[\"comment\"] = p.Comment\n\t}\n\n\tc.enqueue(event{\n\t\tID:        newID(),\n\t\tType:      \"score-create\",\n\t\tTimestamp: now(),\n\t\tBody:      body,\n\t})\n}\n\n// LogEvent enqueues an event-create for notable occurrences within a trace.\nfunc (c *Client) LogEvent(p EventParams) {\n\tbody := map[string]interface{}{\n\t\t\"traceId\": p.TraceID,\n\t\t\"name\":    p.Name,\n\t}\n\tif p.Input != nil {\n\t\tbody[\"input\"] = p.Input\n\t}\n\tif p.Level != \"\" {\n\t\tbody[\"level\"] = p.Level\n\t}\n\n\tc.enqueue(event{\n\t\tID:        newID(),\n\t\tType:      \"event-create\",\n\t\tTimestamp: now(),\n\t\tBody:      body,\n\t})\n}\n\nfunc (c *Client) flush(batch []event) {\n\tif len(batch) == 0 {\n\t\treturn\n\t}\n\n\tcfg := c.getConfig()\n\tif !cfg.Enabled() {\n\t\treturn\n\t}\n\n\tpayload := map[string]interface{}{\n\t\t\"batch\": batch,\n\t}\n\tbody, err := json.Marshal(payload)\n\tif err != nil {\n\t\tslog.Warn(\"langfuse: failed to marshal batch\", \"error\", err)\n\t\treturn\n\t}\n\n\treq, err := http.NewRequest(http.MethodPost, cfg.BaseURL+\"/api/public/ingestion\", bytes.NewReader(body))\n\tif err != nil {\n\t\tslog.Warn(\"langfuse: failed to create request\", \"error\", 
err)\n\t\treturn\n\t}\n\treq.SetBasicAuth(cfg.PublicKey, cfg.SecretKey)\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\n\tresp, err := c.http.Do(req)\n\tif err != nil {\n\t\tslog.Warn(\"langfuse: flush failed\", \"error\", err)\n\t\treturn\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusMultiStatus {\n\t\tslog.Warn(\"langfuse: unexpected status\", \"status\", resp.StatusCode)\n\t}\n}\n\n// TestConnection verifies credentials by calling the Langfuse API.\n// Returns nil on success, error with details on failure.\nfunc (c *Client) TestConnection(cfg Config) error {\n\tcfg.NormalizeBaseURL()\n\treq, err := http.NewRequest(http.MethodGet, cfg.BaseURL+\"/api/public/projects\", nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\treq.SetBasicAuth(cfg.PublicKey, cfg.SecretKey)\n\n\tresp, err := c.http.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode == http.StatusOK {\n\t\treturn nil\n\t}\n\treturn &ConnectionError{StatusCode: resp.StatusCode}\n}\n\n// ConnectionError represents a Langfuse API error.\ntype ConnectionError struct {\n\tStatusCode int\n}\n\nfunc (e *ConnectionError) Error() string {\n\tswitch e.StatusCode {\n\tcase 401:\n\t\treturn \"invalid credentials\"\n\tcase 403:\n\t\treturn \"access denied\"\n\tdefault:\n\t\treturn \"unexpected status: \" + http.StatusText(e.StatusCode)\n\t}\n}\n"
  },
  {
    "path": "internal/license/license.go",
    "content": "package license\n\nimport (\n\t\"crypto/ed25519\"\n\t\"crypto/x509\"\n\t\"encoding/base64\"\n\t\"encoding/json\"\n\t\"encoding/pem\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"sort\"\n\t\"strings\"\n\t\"time\"\n)\n\n// LicenseFile is the on-disk JSON format for a signed license.\ntype LicenseFile struct {\n\tLicenseID      string   `json:\"license_id\"`\n\tEdition        string   `json:\"edition\"`\n\tCustomer       string   `json:\"customer\"`\n\tFeatures       []string `json:\"features\"`\n\tMaxConnections int      `json:\"max_connections\"`\n\tIssuedAt       string   `json:\"issued_at\"`\n\tExpiresAt      string   `json:\"expires_at\"`\n\tSignature      string   `json:\"signature\"`\n}\n\n// LicenseInfo is the public-facing license status returned by the API.\ntype LicenseInfo struct {\n\tEdition   string `json:\"edition\"`\n\tValid     bool   `json:\"valid\"`\n\tCustomer  string `json:\"customer,omitempty\"`\n\tExpiresAt string `json:\"expires_at,omitempty\"`\n\tLicenseID string `json:\"license_id,omitempty\"`\n}\n\n// CommunityLicense returns the default community license info.\nfunc CommunityLicense() *LicenseInfo {\n\treturn &LicenseInfo{\n\t\tEdition: \"community\",\n\t\tValid:   false,\n\t}\n}\n\n// ValidateLicense parses and verifies a signed license JSON string.\n// Returns a LicenseInfo with Valid=true on success, or CommunityLicense() on any failure.\nfunc ValidateLicense(licenseJSON string) *LicenseInfo {\n\tif licenseJSON == \"\" {\n\t\treturn CommunityLicense()\n\t}\n\n\tvar lf LicenseFile\n\tif err := json.Unmarshal([]byte(licenseJSON), &lf); err != nil {\n\t\tslog.Warn(\"License parse error\", \"error\", err)\n\t\treturn CommunityLicense()\n\t}\n\n\t// Decode the embedded public key\n\tpub, err := parsePublicKey(publicKeyPEM)\n\tif err != nil {\n\t\tslog.Error(\"Failed to parse embedded public key\", \"error\", err)\n\t\treturn CommunityLicense()\n\t}\n\n\t// Rebuild the signable payload (all fields except signature)\n\tpayload := 
SignablePayload(lf)\n\n\t// Decode and verify the signature\n\tsig, err := base64.StdEncoding.DecodeString(lf.Signature)\n\tif err != nil {\n\t\tslog.Warn(\"License signature decode error\", \"error\", err)\n\t\treturn CommunityLicense()\n\t}\n\n\tif !ed25519.Verify(pub, payload, sig) {\n\t\tslog.Warn(\"License signature verification failed\")\n\t\treturn CommunityLicense()\n\t}\n\n\t// Check expiry\n\texpires, err := time.Parse(time.RFC3339, lf.ExpiresAt)\n\tif err != nil {\n\t\tslog.Warn(\"License expiry parse error\", \"error\", err)\n\t\treturn CommunityLicense()\n\t}\n\n\tif expires.Before(time.Now()) {\n\t\tslog.Warn(\"License expired\", \"expires_at\", lf.ExpiresAt)\n\t\treturn &LicenseInfo{\n\t\t\tEdition:   strings.ToLower(strings.TrimSpace(lf.Edition)),\n\t\t\tValid:     false,\n\t\t\tCustomer:  lf.Customer,\n\t\t\tExpiresAt: lf.ExpiresAt,\n\t\t\tLicenseID: lf.LicenseID,\n\t\t}\n\t}\n\n\tedition := strings.ToLower(strings.TrimSpace(lf.Edition))\n\n\tslog.Debug(\"Pro license validated\", \"customer\", lf.Customer, \"expires\", lf.ExpiresAt)\n\treturn &LicenseInfo{\n\t\tEdition:   edition,\n\t\tValid:     true,\n\t\tCustomer:  lf.Customer,\n\t\tExpiresAt: lf.ExpiresAt,\n\t\tLicenseID: lf.LicenseID,\n\t}\n}\n\n// SignablePayload returns the canonical JSON bytes for signature verification.\n// All fields except \"signature\", sorted by key, compact encoding.\nfunc SignablePayload(lf LicenseFile) []byte {\n\tm := map[string]interface{}{\n\t\t\"license_id\":      lf.LicenseID,\n\t\t\"edition\":         lf.Edition,\n\t\t\"customer\":        lf.Customer,\n\t\t\"features\":        lf.Features,\n\t\t\"max_connections\": lf.MaxConnections,\n\t\t\"issued_at\":       lf.IssuedAt,\n\t\t\"expires_at\":      lf.ExpiresAt,\n\t}\n\n\tkeys := make([]string, 0, len(m))\n\tfor k := range m {\n\t\tkeys = append(keys, k)\n\t}\n\tsort.Strings(keys)\n\n\tbuf := []byte(\"{\")\n\tfor i, k := range keys {\n\t\tif i > 0 {\n\t\t\tbuf = append(buf, ',')\n\t\t}\n\t\tkb, _ := 
json.Marshal(k)\n\t\tvb, _ := json.Marshal(m[k])\n\t\tbuf = append(buf, kb...)\n\t\tbuf = append(buf, ':')\n\t\tbuf = append(buf, vb...)\n\t}\n\tbuf = append(buf, '}')\n\treturn buf\n}\n\nfunc parsePublicKey(pemData []byte) (ed25519.PublicKey, error) {\n\tblock, _ := pem.Decode(pemData)\n\tif block == nil {\n\t\treturn nil, fmt.Errorf(\"no PEM block found\")\n\t}\n\tkey, err := x509.ParsePKIXPublicKey(block.Bytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpub, ok := key.(ed25519.PublicKey)\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"not an Ed25519 public key\")\n\t}\n\treturn pub, nil\n}\n"
  },
  {
    "path": "internal/license/pubkey.go",
    "content": "package license\n\nimport _ \"embed\"\n\n//go:embed public.pem\nvar publicKeyPEM []byte\n"
  },
  {
    "path": "internal/license/public.pem",
    "content": "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEA62CBTMWey4wS4Fknr/5Sfk7k1J7+4MYpBfxBPvKXRFg=\n-----END PUBLIC KEY-----\n"
  },
  {
    "path": "internal/license/tokens.go",
    "content": "package license\n\nimport (\n\t\"crypto/rand\"\n\t\"encoding/hex\"\n\t\"regexp\"\n\t\"strings\"\n\n\t\"github.com/google/uuid\"\n)\n\n// GenerateTunnelToken generates a tunnel token with prefix 'cht_'\nfunc GenerateTunnelToken() string {\n\tb := make([]byte, 16)\n\tif _, err := rand.Read(b); err != nil {\n\t\t// A crypto/rand failure means the OS entropy source is broken; a\n\t\t// predictable token would be a security hole, so fail hard rather\n\t\t// than return a partially-zeroed token.\n\t\tpanic(\"license: crypto/rand read failed: \" + err.Error())\n\t}\n\treturn \"cht_\" + hex.EncodeToString(b)\n}\n\n// GenerateSessionToken generates a session token by concatenating a\n// hyphenated UUID with a second, hyphen-stripped UUID (68 chars total).\nfunc GenerateSessionToken() string {\n\tu1 := uuid.New().String()\n\tu2 := strings.ReplaceAll(uuid.New().String(), \"-\", \"\")\n\treturn u1 + u2\n}\n\nvar tunnelTokenRegex = regexp.MustCompile(`^cht_[a-f0-9]{32}$`)\n\n// IsValidTunnelToken validates tunnel token format\nfunc IsValidTunnelToken(token string) bool {\n\treturn tunnelTokenRegex.MatchString(token)\n}\n"
  },
  {
    "path": "internal/models/dag.go",
    "content": "package models\n\nimport \"fmt\"\n\n// DepGraph is the resolved DAG of model dependencies.\ntype DepGraph struct {\n\t// Order is the topological execution order (model IDs).\n\tOrder []string\n\t// Deps maps model_id -> [dependency model_ids] (upstream).\n\tDeps map[string][]string\n\t// RevDeps maps model_id -> [dependent model_ids] (downstream).\n\tRevDeps map[string][]string\n}\n\n// BuildDAG constructs the dependency graph and returns topological order.\n// modelIDs: all model IDs.\n// refsByID: model_id -> [referenced model names from $ref()].\n// nameToID: model_name -> model_id.\nfunc BuildDAG(modelIDs []string, refsByID map[string][]string, nameToID map[string]string) (*DepGraph, error) {\n\tg := &DepGraph{\n\t\tDeps:    make(map[string][]string),\n\t\tRevDeps: make(map[string][]string),\n\t}\n\n\t// Build in-degree map\n\tinDegree := make(map[string]int, len(modelIDs))\n\tfor _, id := range modelIDs {\n\t\tinDegree[id] = 0\n\t}\n\n\tfor id, refNames := range refsByID {\n\t\tfor _, refName := range refNames {\n\t\t\tdepID, ok := nameToID[refName]\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"model references unknown model %q via $ref()\", refName)\n\t\t\t}\n\t\t\tif depID == id {\n\t\t\t\treturn nil, fmt.Errorf(\"model cannot reference itself via $ref(%s)\", refName)\n\t\t\t}\n\t\t\tg.Deps[id] = append(g.Deps[id], depID)\n\t\t\tg.RevDeps[depID] = append(g.RevDeps[depID], id)\n\t\t\tinDegree[id]++\n\t\t}\n\t}\n\n\t// Kahn's algorithm for topological sort\n\tvar queue []string\n\tfor _, id := range modelIDs {\n\t\tif inDegree[id] == 0 {\n\t\t\tqueue = append(queue, id)\n\t\t}\n\t}\n\n\tvar order []string\n\tfor len(queue) > 0 {\n\t\tcurr := queue[0]\n\t\tqueue = queue[1:]\n\t\torder = append(order, curr)\n\n\t\tfor _, downstream := range g.RevDeps[curr] {\n\t\t\tinDegree[downstream]--\n\t\t\tif inDegree[downstream] == 0 {\n\t\t\t\tqueue = append(queue, downstream)\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(order) != len(modelIDs) {\n\t\treturn nil, 
fmt.Errorf(\"cycle detected in model dependencies\")\n\t}\n\n\tg.Order = order\n\treturn g, nil\n}\n\n// ConnectedComponents returns groups of model IDs (independent pipelines).\n// Each group preserves topological order from g.Order.\nfunc (g *DepGraph) ConnectedComponents() [][]string {\n\tall := make(map[string]bool, len(g.Order))\n\tfor _, id := range g.Order {\n\t\tall[id] = true\n\t}\n\n\tvisited := make(map[string]bool, len(g.Order))\n\tvar components [][]string\n\n\tfor _, id := range g.Order {\n\t\tif visited[id] {\n\t\t\tcontinue\n\t\t}\n\t\t// BFS on undirected edges\n\t\tcomponent := make(map[string]bool)\n\t\tqueue := []string{id}\n\t\tfor len(queue) > 0 {\n\t\t\tcur := queue[0]\n\t\t\tqueue = queue[1:]\n\t\t\tif visited[cur] {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvisited[cur] = true\n\t\t\tcomponent[cur] = true\n\t\t\tfor _, dep := range g.Deps[cur] {\n\t\t\t\tif !visited[dep] && all[dep] {\n\t\t\t\t\tqueue = append(queue, dep)\n\t\t\t\t}\n\t\t\t}\n\t\t\tfor _, rev := range g.RevDeps[cur] {\n\t\t\t\tif !visited[rev] && all[rev] {\n\t\t\t\t\tqueue = append(queue, rev)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t// Filter g.Order to preserve topological order\n\t\tvar ordered []string\n\t\tfor _, oid := range g.Order {\n\t\t\tif component[oid] {\n\t\t\t\tordered = append(ordered, oid)\n\t\t\t}\n\t\t}\n\t\tcomponents = append(components, ordered)\n\t}\n\n\treturn components\n}\n\n// ComponentContaining returns the component that includes modelID,\n// preserving topological order from g.Order.\nfunc (g *DepGraph) ComponentContaining(modelID string) []string {\n\tfor _, comp := range g.ConnectedComponents() {\n\t\tfor _, id := range comp {\n\t\t\tif id == modelID {\n\t\t\t\treturn comp\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}\n\n// GetUpstreamDeps returns the transitive upstream dependencies for a model ID.\nfunc GetUpstreamDeps(modelID string, deps map[string][]string) map[string]bool {\n\tvisited := make(map[string]bool)\n\tvar walk func(id string)\n\twalk = func(id 
string) {\n\t\tfor _, depID := range deps[id] {\n\t\t\tif !visited[depID] {\n\t\t\t\tvisited[depID] = true\n\t\t\t\twalk(depID)\n\t\t\t}\n\t\t}\n\t}\n\twalk(modelID)\n\treturn visited\n}\n"
  },
  {
    "path": "internal/models/ref.go",
    "content": "package models\n\nimport (\n\t\"fmt\"\n\t\"regexp\"\n\t\"strings\"\n)\n\n// stripSQLComments removes single-line (-- ...) and block (/* ... */) comments\n// so that $ref() inside comments is not treated as a real reference.\nfunc stripSQLComments(sql string) string {\n\t// Remove block comments first (non-greedy, handles multiline)\n\tblockRe := regexp.MustCompile(`(?s)/\\*.*?\\*/`)\n\tsql = blockRe.ReplaceAllString(sql, \"\")\n\t// Remove single-line comments\n\tlineRe := regexp.MustCompile(`--[^\\n]*`)\n\tsql = lineRe.ReplaceAllString(sql, \"\")\n\treturn sql\n}\n\n// refPattern matches $ref(model_name) in SQL.\n// Model names follow ClickHouse identifier rules: [a-zA-Z_][a-zA-Z0-9_]*\nvar refPattern = regexp.MustCompile(`\\$ref\\(\\s*([a-zA-Z_][a-zA-Z0-9_]*)\\s*\\)`)\n\n// modelNamePattern validates model names as valid ClickHouse identifiers.\nvar modelNamePattern = regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_]*$`)\n\n// ValidateModelName checks if a name is a valid ClickHouse identifier.\nfunc ValidateModelName(name string) error {\n\tif name == \"\" {\n\t\treturn fmt.Errorf(\"model name cannot be empty\")\n\t}\n\tif !modelNamePattern.MatchString(name) {\n\t\treturn fmt.Errorf(\"model name %q must be a valid identifier (letters, digits, underscores, starting with letter or underscore)\", name)\n\t}\n\treturn nil\n}\n\n// ExtractRefs returns all model names referenced via $ref() in the SQL body.\n// $ref() occurrences inside SQL comments are ignored.\nfunc ExtractRefs(sqlBody string) []string {\n\tmatches := refPattern.FindAllStringSubmatch(stripSQLComments(sqlBody), -1)\n\tseen := make(map[string]bool)\n\tvar refs []string\n\tfor _, m := range matches {\n\t\tname := strings.TrimSpace(m[1])\n\t\tif !seen[name] {\n\t\t\tseen[name] = true\n\t\t\trefs = append(refs, name)\n\t\t}\n\t}\n\treturn refs\n}\n\n// ResolveRefs replaces all $ref(model_name) with `target_database`.`model_name`.\n// modelTargets maps model_name -> target_database.\n// $ref() 
occurrences inside SQL comments are ignored (comments are stripped first).\nfunc ResolveRefs(sqlBody string, modelTargets map[string]string) (string, error) {\n\tsqlBody = stripSQLComments(sqlBody)\n\tvar resolveErr error\n\tresolved := refPattern.ReplaceAllStringFunc(sqlBody, func(match string) string {\n\t\tsub := refPattern.FindStringSubmatch(match)\n\t\tif len(sub) < 2 {\n\t\t\treturn match\n\t\t}\n\t\tname := strings.TrimSpace(sub[1])\n\t\tdb, ok := modelTargets[name]\n\t\tif !ok {\n\t\t\tresolveErr = fmt.Errorf(\"unresolved reference: $ref(%s)\", name)\n\t\t\treturn match\n\t\t}\n\t\treturn fmt.Sprintf(\"`%s`.`%s`\", db, name)\n\t})\n\treturn resolved, resolveErr\n}\n"
  },
  {
    "path": "internal/models/runner.go",
    "content": "package models\n\nimport (\n\t\"fmt\"\n\t\"log/slog\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// Runner executes model builds against ClickHouse.\ntype Runner struct {\n\tdb      *database.DB\n\tgateway *tunnel.Gateway\n\tsecret  string\n\tmu      sync.Mutex // prevents concurrent runs per connection\n\trunning map[string]bool\n}\n\n// NewRunner creates a new model runner.\nfunc NewRunner(db *database.DB, gw *tunnel.Gateway, secret string) *Runner {\n\treturn &Runner{\n\t\tdb:      db,\n\t\tgateway: gw,\n\t\tsecret:  secret,\n\t\trunning: make(map[string]bool),\n\t}\n}\n\n// RunAll executes all models for a connection in dependency order.\nfunc (r *Runner) RunAll(connectionID, triggeredBy string) (string, error) {\n\tif err := r.acquireLock(connectionID); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer r.releaseLock(connectionID)\n\n\tif !r.gateway.IsTunnelOnline(connectionID) {\n\t\treturn \"\", fmt.Errorf(\"tunnel not connected\")\n\t}\n\n\tuser, password, err := r.findCredentials(connectionID)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"no credentials: %w\", err)\n\t}\n\n\tallModels, err := r.db.GetModelsByConnection(connectionID)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"load models: %w\", err)\n\t}\n\tif len(allModels) == 0 {\n\t\treturn \"\", fmt.Errorf(\"no models defined\")\n\t}\n\n\tdag, idToModel, modelTargets, err := r.buildDAG(allModels)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn r.execute(connectionID, triggeredBy, dag, idToModel, modelTargets, user, password)\n}\n\n// RunPipeline executes only the connected component containing anchorModelID.\nfunc (r *Runner) RunPipeline(connectionID, anchorModelID, triggeredBy string) (string, error) {\n\tif err := r.acquireLock(connectionID); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer 
r.releaseLock(connectionID)\n\n\tif !r.gateway.IsTunnelOnline(connectionID) {\n\t\treturn \"\", fmt.Errorf(\"tunnel not connected\")\n\t}\n\n\tuser, password, err := r.findCredentials(connectionID)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"no credentials: %w\", err)\n\t}\n\n\tallModels, err := r.db.GetModelsByConnection(connectionID)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"load models: %w\", err)\n\t}\n\tif len(allModels) == 0 {\n\t\treturn \"\", fmt.Errorf(\"no models defined\")\n\t}\n\n\tdag, idToModel, modelTargets, err := r.buildDAG(allModels)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tcomponent := dag.ComponentContaining(anchorModelID)\n\tif len(component) == 0 {\n\t\treturn \"\", fmt.Errorf(\"anchor model not found in DAG\")\n\t}\n\n\tdag.Order = component\n\treturn r.execute(connectionID, triggeredBy, dag, idToModel, modelTargets, user, password)\n}\n\n// RunSingle executes a single model and its upstream dependencies.\nfunc (r *Runner) RunSingle(connectionID, modelID, triggeredBy string) (string, error) {\n\tif err := r.acquireLock(connectionID); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer r.releaseLock(connectionID)\n\n\tif !r.gateway.IsTunnelOnline(connectionID) {\n\t\treturn \"\", fmt.Errorf(\"tunnel not connected\")\n\t}\n\n\tuser, password, err := r.findCredentials(connectionID)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"no credentials: %w\", err)\n\t}\n\n\tallModels, err := r.db.GetModelsByConnection(connectionID)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"load models: %w\", err)\n\t}\n\n\tdag, idToModel, modelTargets, err := r.buildDAG(allModels)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\t// Filter to only the target model and its upstream deps\n\tupstream := GetUpstreamDeps(modelID, dag.Deps)\n\tupstream[modelID] = true\n\n\tvar filteredIDs []string\n\tfor _, id := range dag.Order {\n\t\tif upstream[id] {\n\t\t\tfilteredIDs = append(filteredIDs, id)\n\t\t}\n\t}\n\tdag.Order = 
filteredIDs\n\n\treturn r.execute(connectionID, triggeredBy, dag, idToModel, modelTargets, user, password)\n}\n\n// Validate checks all models for reference errors and cycles.\nfunc (r *Runner) Validate(connectionID string) ([]ValidationError, error) {\n\tallModels, err := r.db.GetModelsByConnection(connectionID)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"load models: %w\", err)\n\t}\n\tif len(allModels) == 0 {\n\t\treturn nil, nil\n\t}\n\n\tnameToID := make(map[string]string)\n\tfor _, m := range allModels {\n\t\tnameToID[m.Name] = m.ID\n\t}\n\n\tvar errors []ValidationError\n\trefsByID := make(map[string][]string)\n\n\tfor _, m := range allModels {\n\t\trefs := ExtractRefs(m.SQLBody)\n\t\trefsByID[m.ID] = refs\n\t\tfor _, ref := range refs {\n\t\t\tif _, ok := nameToID[ref]; !ok {\n\t\t\t\terrors = append(errors, ValidationError{\n\t\t\t\t\tModelID:   m.ID,\n\t\t\t\t\tModelName: m.Name,\n\t\t\t\t\tError:     fmt.Sprintf(\"references unknown model %q via $ref()\", ref),\n\t\t\t\t})\n\t\t\t}\n\t\t\tif nameToID[ref] == m.ID {\n\t\t\t\terrors = append(errors, ValidationError{\n\t\t\t\t\tModelID:   m.ID,\n\t\t\t\t\tModelName: m.Name,\n\t\t\t\t\tError:     fmt.Sprintf(\"cannot reference itself via $ref(%s)\", ref),\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(errors) > 0 {\n\t\treturn errors, nil\n\t}\n\n\t// Check for cycles\n\tvar modelIDs []string\n\tfor _, m := range allModels {\n\t\tmodelIDs = append(modelIDs, m.ID)\n\t}\n\n\t_, dagErr := BuildDAG(modelIDs, refsByID, nameToID)\n\tif dagErr != nil {\n\t\terrors = append(errors, ValidationError{\n\t\t\tError: dagErr.Error(),\n\t\t})\n\t}\n\n\treturn errors, nil\n}\n\n// ValidationError represents a validation problem.\ntype ValidationError struct {\n\tModelID   string `json:\"model_id,omitempty\"`\n\tModelName string `json:\"model_name,omitempty\"`\n\tError     string `json:\"error\"`\n}\n\n// ── Internal helpers ────────────────────────────────────────────────\n\nfunc (r *Runner) buildDAG(allModels 
[]database.Model) (*DepGraph, map[string]database.Model, map[string]string, error) {\n\tnameToID := make(map[string]string)\n\tidToModel := make(map[string]database.Model)\n\tmodelTargets := make(map[string]string)\n\tvar modelIDs []string\n\trefsByID := make(map[string][]string)\n\n\tfor _, m := range allModels {\n\t\tnameToID[m.Name] = m.ID\n\t\tidToModel[m.ID] = m\n\t\tmodelTargets[m.Name] = m.TargetDatabase\n\t\tmodelIDs = append(modelIDs, m.ID)\n\t\trefsByID[m.ID] = ExtractRefs(m.SQLBody)\n\t}\n\n\tdag, err := BuildDAG(modelIDs, refsByID, nameToID)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"build DAG: %w\", err)\n\t}\n\n\treturn dag, idToModel, modelTargets, nil\n}\n\nfunc (r *Runner) execute(connectionID, triggeredBy string, dag *DepGraph, idToModel map[string]database.Model, modelTargets map[string]string, user, password string) (string, error) {\n\trunID, err := r.db.CreateModelRun(connectionID, len(dag.Order), triggeredBy)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"create run: %w\", err)\n\t}\n\n\t// Create pending result records\n\tfor _, id := range dag.Order {\n\t\tm := idToModel[id]\n\t\tif _, err := r.db.CreateModelRunResult(runID, m.ID, m.Name); err != nil {\n\t\t\tslog.Error(\"Failed to create run result\", \"model\", m.Name, \"error\", err)\n\t\t}\n\t}\n\n\t// Execute in topological order\n\tfailed := make(map[string]bool)\n\tvar succeeded, failedCount, skipped int\n\n\tfor _, id := range dag.Order {\n\t\tm := idToModel[id]\n\n\t\t// Skip if any upstream dependency failed\n\t\tshouldSkip := false\n\t\tfor _, depID := range dag.Deps[id] {\n\t\t\tif failed[depID] {\n\t\t\t\tshouldSkip = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif shouldSkip {\n\t\t\tskipped++\n\t\t\tfailed[id] = true\n\t\t\tr.db.UpdateModelRunResult(runID, id, \"skipped\", \"\", 0, \"upstream dependency failed\")\n\t\t\tr.db.UpdateModelStatus(id, \"error\", \"upstream dependency failed\")\n\t\t\tcontinue\n\t\t}\n\n\t\t// Resolve $ref()\n\t\tresolvedSQL, 
resolveErr := ResolveRefs(m.SQLBody, modelTargets)\n\t\tif resolveErr != nil {\n\t\t\tfailedCount++\n\t\t\tfailed[id] = true\n\t\t\tr.db.UpdateModelRunResult(runID, id, \"error\", resolvedSQL, 0, resolveErr.Error())\n\t\t\tr.db.UpdateModelStatus(id, \"error\", resolveErr.Error())\n\t\t\tcontinue\n\t\t}\n\n\t\t// Mark as running\n\t\tr.db.UpdateModelRunResult(runID, id, \"running\", \"\", 0, \"\")\n\n\t\t// Build and execute DDL\n\t\tstmts := buildDDL(m, resolvedSQL)\n\t\tstart := time.Now()\n\t\tvar execErr error\n\n\t\tfor _, stmt := range stmts {\n\t\t\t_, execErr = r.gateway.ExecuteQuery(connectionID, stmt, user, password, 5*time.Minute)\n\t\t\tif execErr != nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\telapsed := time.Since(start).Milliseconds()\n\t\tddlForLog := stmts[len(stmts)-1] // log the main statement\n\n\t\tif execErr != nil {\n\t\t\tfailedCount++\n\t\t\tfailed[id] = true\n\t\t\tr.db.UpdateModelRunResult(runID, id, \"error\", ddlForLog, elapsed, execErr.Error())\n\t\t\tr.db.UpdateModelStatus(id, \"error\", execErr.Error())\n\t\t\tslog.Error(\"Model execution failed\", \"model\", m.Name, \"error\", execErr)\n\t\t} else {\n\t\t\tsucceeded++\n\t\t\tr.db.UpdateModelRunResult(runID, id, \"success\", ddlForLog, elapsed, \"\")\n\t\t\tr.db.UpdateModelStatus(id, \"success\", \"\")\n\t\t}\n\t}\n\n\t// Finalize run\n\trunStatus := \"success\"\n\tif failedCount > 0 && succeeded > 0 {\n\t\trunStatus = \"partial\"\n\t} else if failedCount > 0 || skipped == len(dag.Order) {\n\t\trunStatus = \"error\"\n\t}\n\tr.db.FinalizeModelRun(runID, runStatus, succeeded, failedCount, skipped)\n\n\treturn runID, nil\n}\n\n// buildDDL generates the DDL statement(s) for a model.\n// Returns a slice because TABLE needs DROP + CREATE as separate statements.\nfunc buildDDL(m database.Model, resolvedSQL string) []string {\n\tswitch m.Materialization {\n\tcase \"table\":\n\t\tdrop := fmt.Sprintf(\"DROP TABLE IF EXISTS `%s`.`%s`\", m.TargetDatabase, m.Name)\n\t\tcreate := 
fmt.Sprintf(\"CREATE TABLE `%s`.`%s` ENGINE = %s ORDER BY %s AS %s\",\n\t\t\tm.TargetDatabase, m.Name, m.TableEngine, m.OrderBy, resolvedSQL)\n\t\treturn []string{drop, create}\n\tdefault: // view\n\t\treturn []string{\n\t\t\tfmt.Sprintf(\"CREATE OR REPLACE VIEW `%s`.`%s` AS %s\",\n\t\t\t\tm.TargetDatabase, m.Name, resolvedSQL),\n\t\t}\n\t}\n}\n\nfunc (r *Runner) acquireLock(connectionID string) error {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\tif r.running[connectionID] {\n\t\treturn fmt.Errorf(\"a model run is already in progress for this connection\")\n\t}\n\tr.running[connectionID] = true\n\treturn nil\n}\n\nfunc (r *Runner) releaseLock(connectionID string) {\n\tr.mu.Lock()\n\tdefer r.mu.Unlock()\n\tdelete(r.running, connectionID)\n}\n\nfunc (r *Runner) findCredentials(connectionID string) (string, string, error) {\n\tsessions, err := r.db.GetActiveSessionsByConnection(connectionID, 3)\n\tif err != nil {\n\t\treturn \"\", \"\", fmt.Errorf(\"failed to load sessions: %w\", err)\n\t}\n\tfor _, s := range sessions {\n\t\tpassword, err := crypto.Decrypt(s.EncryptedPassword, r.secret)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\treturn s.ClickhouseUser, password, nil\n\t}\n\treturn \"\", \"\", fmt.Errorf(\"no active sessions with valid credentials for connection %s\", connectionID)\n}\n"
  },
  {
    "path": "internal/models/scheduler.go",
    "content": "package models\n\nimport (\n\t\"log/slog\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/scheduler\"\n)\n\nconst modelTickInterval = 30 * time.Second\n\n// Scheduler checks for due model schedules and triggers RunAll.\ntype Scheduler struct {\n\tdb     *database.DB\n\trunner *Runner\n\tstopCh chan struct{}\n}\n\n// NewScheduler creates a new model scheduler.\nfunc NewScheduler(db *database.DB, runner *Runner) *Scheduler {\n\treturn &Scheduler{\n\t\tdb:     db,\n\t\trunner: runner,\n\t\tstopCh: make(chan struct{}),\n\t}\n}\n\n// Start begins the scheduler goroutine.\nfunc (s *Scheduler) Start() {\n\tgo func() {\n\t\tslog.Info(\"Model scheduler started\", \"interval\", modelTickInterval)\n\t\tticker := time.NewTicker(modelTickInterval)\n\t\tdefer ticker.Stop()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-s.stopCh:\n\t\t\t\tslog.Info(\"Model scheduler stopped\")\n\t\t\t\treturn\n\t\t\tcase <-ticker.C:\n\t\t\t\ts.tick()\n\t\t\t}\n\t\t}\n\t}()\n}\n\n// Stop signals the scheduler goroutine to stop.\nfunc (s *Scheduler) Stop() {\n\tclose(s.stopCh)\n}\n\nfunc (s *Scheduler) tick() {\n\tschedules, err := s.db.GetEnabledModelSchedules()\n\tif err != nil {\n\t\tslog.Error(\"Failed to load enabled model schedules\", \"error\", err)\n\t\treturn\n\t}\n\n\tnow := time.Now().UTC()\n\tfor _, sched := range schedules {\n\t\tif sched.NextRunAt == nil || sched.AnchorModelID == nil {\n\t\t\tcontinue\n\t\t}\n\t\tnextRun, err := time.Parse(time.RFC3339, *sched.NextRunAt)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif nextRun.After(now) {\n\t\t\tcontinue\n\t\t}\n\n\t\tslog.Info(\"Model schedule triggered\",\n\t\t\t\"connection_id\", sched.ConnectionID,\n\t\t\t\"anchor_model_id\", *sched.AnchorModelID,\n\t\t\t\"cron\", sched.Cron)\n\n\t\tstatus := \"success\"\n\t\tvar runError string\n\n\t\t_, runErr := s.runner.RunPipeline(sched.ConnectionID, *sched.AnchorModelID, \"scheduler\")\n\t\tif runErr != nil 
{\n\t\t\tstatus = \"error\"\n\t\t\trunError = runErr.Error()\n\t\t\tslog.Error(\"Scheduled model run failed\",\n\t\t\t\t\"connection_id\", sched.ConnectionID,\n\t\t\t\t\"anchor_model_id\", *sched.AnchorModelID,\n\t\t\t\t\"error\", runErr)\n\t\t}\n\n\t\t// Compute next run and update status by schedule ID\n\t\tvar nextRunAt *string\n\t\tif next := scheduler.ComputeNextRun(sched.Cron, time.Now().UTC()); next != nil {\n\t\t\tformatted := next.Format(time.RFC3339)\n\t\t\tnextRunAt = &formatted\n\t\t}\n\n\t\tif err := s.db.UpdateModelScheduleStatusByID(sched.ID, status, runError, nextRunAt); err != nil {\n\t\t\tslog.Error(\"Failed to update model schedule status\", \"schedule_id\", sched.ID, \"error\", err)\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "internal/pipelines/clickhouse_sink.go",
    "content": "package pipelines\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"sort\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// ClickHouseSink writes batches to a ClickHouse table via the tunnel gateway.\ntype ClickHouseSink struct {\n\tgateway   *tunnel.Gateway\n\tdb        *database.DB\n\tsecretKey string\n\n\ttableOnce sync.Once\n\ttableErr  error\n}\n\n// NewClickHouseSink creates a new ClickHouse sink connector.\nfunc NewClickHouseSink(gw *tunnel.Gateway, db *database.DB, secretKey string) *ClickHouseSink {\n\treturn &ClickHouseSink{\n\t\tgateway:   gw,\n\t\tdb:        db,\n\t\tsecretKey: secretKey,\n\t}\n}\n\nfunc (s *ClickHouseSink) Type() string { return \"sink_clickhouse\" }\n\n// Validate checks the sink configuration.\nfunc (s *ClickHouseSink) Validate(cfg ConnectorConfig) error {\n\tdb, _ := cfg.Fields[\"database\"].(string)\n\ttable, _ := cfg.Fields[\"table\"].(string)\n\tif db == \"\" {\n\t\treturn fmt.Errorf(\"database is required\")\n\t}\n\tif table == \"\" {\n\t\treturn fmt.Errorf(\"table is required\")\n\t}\n\treturn nil\n}\n\n// WriteBatch inserts a batch of records into the ClickHouse table using INSERT FORMAT JSONEachRow.\nfunc (s *ClickHouseSink) WriteBatch(ctx context.Context, cfg ConnectorConfig, batch Batch) (int, error) {\n\tif len(batch.Records) == 0 {\n\t\treturn 0, nil\n\t}\n\n\t// Auto-create table on first batch if configured\n\tif boolField(cfg.Fields, \"create_table\", false) {\n\t\ts.tableOnce.Do(func() {\n\t\t\ts.tableErr = s.ensureTable(ctx, cfg, batch)\n\t\t})\n\t\tif s.tableErr != nil {\n\t\t\treturn 0, fmt.Errorf(\"ensure table: %w\", s.tableErr)\n\t\t}\n\t}\n\n\tdb, _ := cfg.Fields[\"database\"].(string)\n\ttable, _ := cfg.Fields[\"table\"].(string)\n\n\t// Build JSONEachRow payload\n\tvar sb strings.Builder\n\tfor _, rec := 
range batch.Records {\n\t\tif len(rec.RawJSON) > 0 {\n\t\t\tsb.Write(rec.RawJSON)\n\t\t} else {\n\t\t\traw, err := json.Marshal(rec.Data)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, fmt.Errorf(\"marshal record: %w\", err)\n\t\t\t}\n\t\t\tsb.Write(raw)\n\t\t}\n\t\tsb.WriteByte('\\n')\n\t}\n\n\tquery := fmt.Sprintf(\"INSERT INTO `%s`.`%s` FORMAT JSONEachRow\\n%s\", db, table, sb.String())\n\n\t// Find credentials from the pipeline's connection\n\tconnectionID, _ := cfg.Fields[\"connection_id\"].(string)\n\tif connectionID == \"\" {\n\t\treturn 0, fmt.Errorf(\"no connection_id in sink config\")\n\t}\n\n\tuser, password, err := s.findCredentials(connectionID)\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"find credentials: %w\", err)\n\t}\n\n\t_, execErr := s.gateway.ExecuteQuery(connectionID, query, user, password, 30*time.Second)\n\tif execErr != nil {\n\t\treturn 0, fmt.Errorf(\"execute insert: %w\", execErr)\n\t}\n\n\treturn len(batch.Records), nil\n}\n\n// ensureTable creates the target table if it doesn't exist, inferring schema from the first batch.\nfunc (s *ClickHouseSink) ensureTable(ctx context.Context, cfg ConnectorConfig, batch Batch) error {\n\tdb := stringField(cfg.Fields, \"database\", \"default\")\n\ttable := stringField(cfg.Fields, \"table\", \"\")\n\tengine := stringField(cfg.Fields, \"create_table_engine\", \"MergeTree\")\n\torderBy := stringField(cfg.Fields, \"create_table_order_by\", \"tuple()\")\n\n\tif table == \"\" {\n\t\treturn fmt.Errorf(\"table name is required for auto-creation\")\n\t}\n\tif orderBy == \"\" {\n\t\torderBy = \"tuple()\"\n\t}\n\n\t// Infer columns from first record\n\tif len(batch.Records) == 0 {\n\t\treturn fmt.Errorf(\"cannot infer schema from empty batch\")\n\t}\n\n\tdata := batch.Records[0].Data\n\tif len(data) == 0 {\n\t\treturn fmt.Errorf(\"cannot infer schema from empty record\")\n\t}\n\n\t// Collect column names sorted for deterministic output\n\tcolNames := make([]string, 0, len(data))\n\tfor k := range data 
{\n\t\tcolNames = append(colNames, k)\n\t}\n\tsort.Strings(colNames)\n\n\t// Build column definitions\n\tvar cols []string\n\tfor _, name := range colNames {\n\t\tchType := inferClickHouseType(data[name])\n\t\tcols = append(cols, fmt.Sprintf(\"`%s` %s\", name, chType))\n\t}\n\n\tddl := fmt.Sprintf(\"CREATE TABLE IF NOT EXISTS `%s`.`%s` (\\n  %s\\n) ENGINE = %s\\nORDER BY %s\",\n\t\tdb, table, strings.Join(cols, \",\\n  \"), engine, orderBy)\n\n\tconnectionID, _ := cfg.Fields[\"connection_id\"].(string)\n\tif connectionID == \"\" {\n\t\treturn fmt.Errorf(\"no connection_id in sink config\")\n\t}\n\n\tuser, password, err := s.findCredentials(connectionID)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"find credentials: %w\", err)\n\t}\n\n\t_, execErr := s.gateway.ExecuteQuery(connectionID, ddl, user, password, 30*time.Second)\n\tif execErr != nil {\n\t\treturn fmt.Errorf(\"execute CREATE TABLE: %w\", execErr)\n\t}\n\n\tslog.Info(\"Auto-created ClickHouse table\", \"database\", db, \"table\", table, \"engine\", engine, \"columns\", len(cols))\n\treturn nil\n}\n\n// inferClickHouseType maps a Go/JSON value to a ClickHouse column type.\nfunc inferClickHouseType(v interface{}) string {\n\tswitch v.(type) {\n\tcase string:\n\t\treturn \"String\"\n\tcase float64:\n\t\treturn \"Float64\"\n\tcase bool:\n\t\treturn \"UInt8\"\n\tcase nil:\n\t\treturn \"Nullable(String)\"\n\tdefault:\n\t\treturn \"String\"\n\t}\n}\n\n// findCredentials retrieves ClickHouse credentials from active sessions.\nfunc (s *ClickHouseSink) findCredentials(connectionID string) (string, string, error) {\n\tsessions, err := s.db.GetActiveSessionsByConnection(connectionID, 3)\n\tif err != nil {\n\t\treturn \"\", \"\", fmt.Errorf(\"failed to load sessions: %w\", err)\n\t}\n\n\tfor _, sess := range sessions {\n\t\tpassword, err := crypto.Decrypt(sess.EncryptedPassword, s.secretKey)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\treturn sess.ClickhouseUser, password, nil\n\t}\n\n\treturn \"\", \"\", 
fmt.Errorf(\"no active sessions with valid credentials for connection %s\", connectionID)\n}\n"
  },
  {
    "path": "internal/pipelines/database_source.go",
    "content": "package pipelines\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"time\"\n\n\t_ \"github.com/go-sql-driver/mysql\"\n\t_ \"github.com/lib/pq\"\n\t_ \"modernc.org/sqlite\"\n)\n\n// DatabaseSource polls a PostgreSQL or MySQL database for new rows.\ntype DatabaseSource struct{}\n\nfunc (d *DatabaseSource) Type() string { return \"source_database\" }\n\n// Validate checks database source configuration.\nfunc (d *DatabaseSource) Validate(cfg ConnectorConfig) error {\n\tdbType := stringField(cfg.Fields, \"db_type\", \"\")\n\tconnStr := stringField(cfg.Fields, \"connection_string\", \"\")\n\tquery := stringField(cfg.Fields, \"query\", \"\")\n\n\tif dbType == \"\" {\n\t\treturn fmt.Errorf(\"db_type is required (postgres, mysql, or sqlite)\")\n\t}\n\tif dbType != \"postgres\" && dbType != \"mysql\" && dbType != \"sqlite\" {\n\t\treturn fmt.Errorf(\"db_type must be 'postgres', 'mysql', or 'sqlite'\")\n\t}\n\tif connStr == \"\" {\n\t\treturn fmt.Errorf(\"connection_string is required\")\n\t}\n\tif query == \"\" {\n\t\treturn fmt.Errorf(\"query is required\")\n\t}\n\treturn nil\n}\n\n// Start begins polling the source database and sends batches to the output channel.\nfunc (d *DatabaseSource) Start(ctx context.Context, cfg ConnectorConfig, out chan<- Batch) error {\n\tdbType := stringField(cfg.Fields, \"db_type\", \"\")\n\tconnStr := stringField(cfg.Fields, \"connection_string\", \"\")\n\tquery := stringField(cfg.Fields, \"query\", \"\")\n\tpollIntervalSec := intField(cfg.Fields, \"poll_interval\", 60)\n\twatermarkCol := stringField(cfg.Fields, \"watermark_column\", \"\")\n\tbatchSize := intField(cfg.Fields, \"batch_size\", 1000)\n\n\t// Map db_type to driver name\n\tdriver := dbType\n\tif dbType == \"postgres\" {\n\t\tdriver = \"postgres\"\n\t}\n\n\tdb, err := sql.Open(driver, connStr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"open database: %w\", err)\n\t}\n\tdefer 
db.Close()\n\n\tdb.SetMaxOpenConns(2)\n\tdb.SetMaxIdleConns(1)\n\tdb.SetConnMaxLifetime(5 * time.Minute)\n\n\tif err := db.PingContext(ctx); err != nil {\n\t\treturn fmt.Errorf(\"ping database: %w\", err)\n\t}\n\n\tslog.Info(\"Database source started\", \"type\", dbType, \"poll_interval\", pollIntervalSec)\n\n\tvar watermark interface{}\n\tticker := time.NewTicker(time.Duration(pollIntervalSec) * time.Second)\n\tdefer ticker.Stop()\n\n\tpoll := func() error {\n\t\tvar rows *sql.Rows\n\t\tvar queryErr error\n\n\t\tif watermarkCol != \"\" && watermark != nil {\n\t\t\trows, queryErr = db.QueryContext(ctx, query, watermark)\n\t\t} else {\n\t\t\trows, queryErr = db.QueryContext(ctx, query)\n\t\t}\n\t\tif queryErr != nil {\n\t\t\treturn fmt.Errorf(\"query: %w\", queryErr)\n\t\t}\n\t\tdefer rows.Close()\n\n\t\tcolumns, err := rows.Columns()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"get columns: %w\", err)\n\t\t}\n\n\t\tvar buf []Record\n\t\tfor rows.Next() {\n\t\t\tvalues := make([]interface{}, len(columns))\n\t\t\tvaluePtrs := make([]interface{}, len(columns))\n\t\t\tfor i := range values {\n\t\t\t\tvaluePtrs[i] = &values[i]\n\t\t\t}\n\n\t\t\tif err := rows.Scan(valuePtrs...); err != nil {\n\t\t\t\tslog.Warn(\"Database source row scan error\", \"error\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tdata := make(map[string]interface{})\n\t\t\tfor i, col := range columns {\n\t\t\t\tval := values[i]\n\t\t\t\t// Convert []byte to string for JSON compatibility\n\t\t\t\tif b, ok := val.([]byte); ok {\n\t\t\t\t\tdata[col] = string(b)\n\t\t\t\t} else {\n\t\t\t\t\tdata[col] = val\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Update watermark\n\t\t\tif watermarkCol != \"\" {\n\t\t\t\tif wv, ok := data[watermarkCol]; ok {\n\t\t\t\t\twatermark = wv\n\t\t\t\t}\n\t\t\t}\n\n\t\t\traw, _ := json.Marshal(data)\n\t\t\tbuf = append(buf, Record{\n\t\t\t\tData:    data,\n\t\t\t\tRawJSON: raw,\n\t\t\t})\n\n\t\t\tif len(buf) >= batchSize {\n\t\t\t\tselect {\n\t\t\t\tcase out <- Batch{Records: buf, 
SourceTS: time.Now()}:\n\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tbuf = nil\n\t\t\t}\n\t\t}\n\n\t\tif err := rows.Err(); err != nil {\n\t\t\treturn fmt.Errorf(\"rows iteration: %w\", err)\n\t\t}\n\n\t\t// Flush remaining\n\t\tif len(buf) > 0 {\n\t\t\tselect {\n\t\t\tcase out <- Batch{Records: buf, SourceTS: time.Now()}:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n\n\t// First poll\n\tif err := poll(); err != nil {\n\t\tslog.Error(\"Database source poll error\", \"error\", err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn nil\n\t\tcase <-ticker.C:\n\t\t\tif err := poll(); err != nil {\n\t\t\t\tslog.Error(\"Database source poll error\", \"error\", err)\n\t\t\t}\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "internal/pipelines/helpers.go",
    "content": "package pipelines\n\n// intField extracts an int from a config map with a default fallback.\nfunc intField(fields map[string]interface{}, key string, def int) int {\n\tv, ok := fields[key]\n\tif !ok {\n\t\treturn def\n\t}\n\tswitch n := v.(type) {\n\tcase float64:\n\t\treturn int(n)\n\tcase int:\n\t\treturn n\n\tcase int64:\n\t\treturn int(n)\n\tdefault:\n\t\treturn def\n\t}\n}\n\n// stringField extracts a string from a config map with a default fallback.\nfunc stringField(fields map[string]interface{}, key, def string) string {\n\tv, ok := fields[key]\n\tif !ok {\n\t\treturn def\n\t}\n\ts, ok := v.(string)\n\tif !ok {\n\t\treturn def\n\t}\n\treturn s\n}\n\n// boolField extracts a bool from a config map with a default fallback.\nfunc boolField(fields map[string]interface{}, key string, def bool) bool {\n\tv, ok := fields[key]\n\tif !ok {\n\t\treturn def\n\t}\n\tb, ok := v.(bool)\n\tif !ok {\n\t\treturn def\n\t}\n\treturn b\n}\n"
  },
  {
    "path": "internal/pipelines/kafka.go",
    "content": "package pipelines\n\nimport (\n\t\"context\"\n\t\"crypto/tls\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/IBM/sarama\"\n)\n\n// KafkaSource consumes messages from a Kafka topic using a consumer group.\ntype KafkaSource struct{}\n\nfunc (k *KafkaSource) Type() string { return \"source_kafka\" }\n\n// Validate checks Kafka configuration.\nfunc (k *KafkaSource) Validate(cfg ConnectorConfig) error {\n\tbrokers, _ := cfg.Fields[\"brokers\"].(string)\n\ttopic, _ := cfg.Fields[\"topic\"].(string)\n\tif brokers == \"\" {\n\t\treturn fmt.Errorf(\"brokers is required\")\n\t}\n\tif topic == \"\" {\n\t\treturn fmt.Errorf(\"topic is required\")\n\t}\n\treturn nil\n}\n\n// Start begins consuming messages from Kafka and sends batches to the output channel.\nfunc (k *KafkaSource) Start(ctx context.Context, cfg ConnectorConfig, out chan<- Batch) error {\n\tbrokers := strings.Split(stringField(cfg.Fields, \"brokers\", \"\"), \",\")\n\ttopic := stringField(cfg.Fields, \"topic\", \"\")\n\tgroup := stringField(cfg.Fields, \"consumer_group\", \"ch-ui-pipeline\")\n\tbatchSize := intField(cfg.Fields, \"batch_size\", 500)\n\tbatchTimeoutMs := intField(cfg.Fields, \"batch_timeout_ms\", 5000)\n\n\tconfig := sarama.NewConfig()\n\tconfig.Consumer.Group.Rebalance.GroupStrategies = []sarama.BalanceStrategy{sarama.NewBalanceStrategyRoundRobin()}\n\tconfig.Consumer.Offsets.Initial = sarama.OffsetNewest\n\tconfig.Version = sarama.V2_6_0_0\n\n\t// SASL configuration\n\tsaslMechanism := stringField(cfg.Fields, \"sasl_mechanism\", \"\")\n\tif saslMechanism != \"\" {\n\t\tconfig.Net.SASL.Enable = true\n\t\tconfig.Net.SASL.User = stringField(cfg.Fields, \"sasl_username\", \"\")\n\t\tconfig.Net.SASL.Password = stringField(cfg.Fields, \"sasl_password\", \"\")\n\n\t\tswitch strings.ToUpper(saslMechanism) {\n\t\tcase \"PLAIN\":\n\t\t\tconfig.Net.SASL.Mechanism = sarama.SASLTypePlaintext\n\t\tcase 
\"SCRAM-SHA-256\":\n\t\t\tconfig.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA256\n\t\t\tconfig.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &scramClient{HashGeneratorFcn: SHA256} }\n\t\tcase \"SCRAM-SHA-512\":\n\t\t\tconfig.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512\n\t\t\tconfig.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &scramClient{HashGeneratorFcn: SHA512} }\n\t\t}\n\t}\n\n\t// TLS\n\tif boolField(cfg.Fields, \"use_tls\", false) {\n\t\tconfig.Net.TLS.Enable = true\n\t\tconfig.Net.TLS.Config = &tls.Config{\n\t\t\tMinVersion: tls.VersionTLS12,\n\t\t}\n\t}\n\n\t// Create consumer group\n\tclient, err := sarama.NewConsumerGroup(brokers, group, config)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create kafka consumer group: %w\", err)\n\t}\n\tdefer client.Close()\n\n\thandler := &kafkaGroupHandler{\n\t\tbatchSize:      batchSize,\n\t\tbatchTimeoutMs: batchTimeoutMs,\n\t\tout:            out,\n\t}\n\n\tslog.Info(\"Kafka source started\", \"brokers\", brokers, \"topic\", topic, \"group\", group)\n\n\tfor {\n\t\tif ctx.Err() != nil {\n\t\t\treturn nil\n\t\t}\n\t\tif err := client.Consume(ctx, []string{topic}, handler); err != nil {\n\t\t\treturn fmt.Errorf(\"kafka consume: %w\", err)\n\t\t}\n\t}\n}\n\n// kafkaGroupHandler implements sarama.ConsumerGroupHandler.\ntype kafkaGroupHandler struct {\n\tbatchSize      int\n\tbatchTimeoutMs int\n\tout            chan<- Batch\n}\n\nfunc (h *kafkaGroupHandler) Setup(_ sarama.ConsumerGroupSession) error   { return nil }\nfunc (h *kafkaGroupHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }\n\nfunc (h *kafkaGroupHandler) ConsumeClaim(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {\n\tvar buf []Record\n\tticker := time.NewTicker(time.Duration(h.batchTimeoutMs) * time.Millisecond)\n\tdefer ticker.Stop()\n\n\tflush := func() {\n\t\tif len(buf) == 0 {\n\t\t\treturn\n\t\t}\n\t\tbatch := Batch{\n\t\t\tRecords:  
buf,\n\t\t\tSourceTS: time.Now(),\n\t\t}\n\t\tselect {\n\t\tcase h.out <- batch:\n\t\tcase <-session.Context().Done():\n\t\t\treturn\n\t\t}\n\t\tbuf = nil\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-session.Context().Done():\n\t\t\tflush()\n\t\t\treturn nil\n\t\tcase msg, ok := <-claim.Messages():\n\t\t\tif !ok {\n\t\t\t\tflush()\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvar data map[string]interface{}\n\t\t\tif err := json.Unmarshal(msg.Value, &data); err != nil {\n\t\t\t\tslog.Warn(\"Kafka message parse error, wrapping as raw\", \"error\", err, \"offset\", msg.Offset)\n\t\t\t\tdata = map[string]interface{}{\n\t\t\t\t\t\"_raw\":       string(msg.Value),\n\t\t\t\t\t\"_topic\":     msg.Topic,\n\t\t\t\t\t\"_partition\": msg.Partition,\n\t\t\t\t\t\"_offset\":    msg.Offset,\n\t\t\t\t\t\"_timestamp\": msg.Timestamp.UTC().Format(time.RFC3339),\n\t\t\t\t}\n\t\t\t}\n\t\t\tbuf = append(buf, Record{\n\t\t\t\tData:    data,\n\t\t\t\tRawJSON: msg.Value,\n\t\t\t})\n\t\t\tsession.MarkMessage(msg, \"\")\n\t\t\tif len(buf) >= h.batchSize {\n\t\t\t\tflush()\n\t\t\t}\n\t\tcase <-ticker.C:\n\t\t\tflush()\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "internal/pipelines/kafka_scram.go",
    "content": "package pipelines\n\nimport (\n\t\"crypto/sha256\"\n\t\"crypto/sha512\"\n\t\"hash\"\n\n\t\"github.com/xdg-go/scram\"\n)\n\n// SHA256 and SHA512 hash generators for SCRAM authentication.\nvar (\n\tSHA256 scram.HashGeneratorFcn = func() hash.Hash { return sha256.New() }\n\tSHA512 scram.HashGeneratorFcn = func() hash.Hash { return sha512.New() }\n)\n\n// scramClient implements sarama.SCRAMClient using xdg-go/scram.\ntype scramClient struct {\n\t*scram.ClientConversation\n\tscram.HashGeneratorFcn\n}\n\nfunc (c *scramClient) Begin(userName, password, authzID string) (err error) {\n\tclient, err := c.HashGeneratorFcn.NewClient(userName, password, authzID)\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.ClientConversation = client.NewConversation()\n\treturn nil\n}\n\nfunc (c *scramClient) Step(challenge string) (string, error) {\n\treturn c.ClientConversation.Step(challenge)\n}\n\nfunc (c *scramClient) Done() bool {\n\treturn c.ClientConversation.Done()\n}\n"
  },
  {
    "path": "internal/pipelines/registry.go",
    "content": "package pipelines\n\nimport \"fmt\"\n\n// NewSource returns a SourceConnector for the given node type.\nfunc NewSource(nodeType string) (SourceConnector, error) {\n\tswitch nodeType {\n\tcase \"source_kafka\":\n\t\treturn &KafkaSource{}, nil\n\tcase \"source_webhook\":\n\t\treturn &WebhookSource{}, nil\n\tcase \"source_database\":\n\t\treturn &DatabaseSource{}, nil\n\tcase \"source_s3\":\n\t\treturn &S3Source{}, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"unknown source type: %s\", nodeType)\n\t}\n}\n"
  },
  {
    "path": "internal/pipelines/runner.go",
    "content": "package pipelines\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// RunningPipeline is the runtime state of a single active pipeline.\ntype RunningPipeline struct {\n\tPipelineID string\n\tRunID      string\n\tCancel     context.CancelFunc\n\tMetrics    *Metrics\n\tStartedAt  time.Time\n\tDone       chan struct{}\n}\n\n// Runner manages the lifecycle of all running pipelines.\ntype Runner struct {\n\tdb      *database.DB\n\tgateway *tunnel.Gateway\n\tcfg     *config.Config\n\n\tmu        sync.RWMutex\n\tpipelines map[string]*RunningPipeline\n\tstopCh    chan struct{}\n}\n\n// NewRunner creates a new pipeline runner.\nfunc NewRunner(db *database.DB, gw *tunnel.Gateway, cfg *config.Config) *Runner {\n\treturn &Runner{\n\t\tdb:        db,\n\t\tgateway:   gw,\n\t\tcfg:       cfg,\n\t\tpipelines: make(map[string]*RunningPipeline),\n\t\tstopCh:    make(chan struct{}),\n\t}\n}\n\n// Start resumes any pipelines that were in \"running\" status (crash recovery).\nfunc (r *Runner) Start() {\n\tgo func() {\n\t\tpipelines, err := r.db.GetPipelinesByStatus(\"running\")\n\t\tif err != nil {\n\t\t\tslog.Error(\"Failed to load running pipelines for recovery\", \"error\", err)\n\t\t\treturn\n\t\t}\n\t\tfor _, p := range pipelines {\n\t\t\tif err := r.StartPipeline(p.ID); err != nil {\n\t\t\t\tslog.Error(\"Failed to resume pipeline\", \"error\", err, \"pipeline\", p.ID)\n\t\t\t\tr.db.UpdatePipelineStatus(p.ID, \"error\", err.Error())\n\t\t\t}\n\t\t}\n\t\tif len(pipelines) > 0 {\n\t\t\tslog.Info(\"Pipeline runner started\", \"resumed\", len(pipelines))\n\t\t}\n\t}()\n}\n\n// Stop gracefully stops all running pipelines.\nfunc (r *Runner) Stop() {\n\tclose(r.stopCh)\n\tr.mu.RLock()\n\tfor _, rp := range r.pipelines 
{\n\t\trp.Cancel()\n\t}\n\tr.mu.RUnlock()\n\n\t// Wait for all to finish with timeout\n\ttimer := time.NewTimer(30 * time.Second)\n\tdefer timer.Stop()\n\n\tr.mu.RLock()\n\tfor _, rp := range r.pipelines {\n\t\tselect {\n\t\tcase <-rp.Done:\n\t\tcase <-timer.C:\n\t\t\tslog.Warn(\"Timeout waiting for pipeline to stop\", \"pipeline\", rp.PipelineID)\n\t\t}\n\t}\n\tr.mu.RUnlock()\n}\n\n// StartPipeline starts a single pipeline by ID.\nfunc (r *Runner) StartPipeline(pipelineID string) error {\n\tr.mu.Lock()\n\tif _, exists := r.pipelines[pipelineID]; exists {\n\t\tr.mu.Unlock()\n\t\treturn fmt.Errorf(\"pipeline %s is already running\", pipelineID)\n\t}\n\tr.mu.Unlock()\n\n\t// Load pipeline + graph from DB\n\tpipeline, err := r.db.GetPipelineByID(pipelineID)\n\tif err != nil || pipeline == nil {\n\t\treturn fmt.Errorf(\"pipeline not found: %s\", pipelineID)\n\t}\n\n\tnodes, edges, err := r.db.GetPipelineGraph(pipelineID)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"load pipeline graph: %w\", err)\n\t}\n\n\t// Find source and sink nodes\n\tvar sourceNode *database.PipelineNode\n\tvar sinkNode *database.PipelineNode\n\tfor i := range nodes {\n\t\tswitch {\n\t\tcase isSourceType(nodes[i].NodeType):\n\t\t\tif sourceNode != nil {\n\t\t\t\treturn fmt.Errorf(\"pipeline has multiple source nodes\")\n\t\t\t}\n\t\t\tsourceNode = &nodes[i]\n\t\tcase nodes[i].NodeType == \"sink_clickhouse\":\n\t\t\tif sinkNode != nil {\n\t\t\t\treturn fmt.Errorf(\"pipeline has multiple sink nodes\")\n\t\t\t}\n\t\t\tsinkNode = &nodes[i]\n\t\t}\n\t}\n\n\tif sourceNode == nil {\n\t\treturn fmt.Errorf(\"pipeline has no source node\")\n\t}\n\tif sinkNode == nil {\n\t\treturn fmt.Errorf(\"pipeline has no sink node\")\n\t}\n\n\t// Validate that source connects to sink\n\tconnected := false\n\tfor _, e := range edges {\n\t\tif e.SourceNodeID == sourceNode.ID && e.TargetNodeID == sinkNode.ID {\n\t\t\tconnected = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif !connected {\n\t\treturn fmt.Errorf(\"source node is not 
connected to sink node\")\n\t}\n\n\t// Parse node configs\n\tsourceCfg, err := parseNodeConfig(sourceNode)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parse source config: %w\", err)\n\t}\n\tsinkCfg, err := parseNodeConfig(sinkNode)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"parse sink config: %w\", err)\n\t}\n\n\t// Inject runtime fields into configs\n\tsinkCfg.Fields[\"connection_id\"] = pipeline.ConnectionID\n\tsourceCfg.Fields[\"pipeline_id\"] = pipelineID\n\n\t// Instantiate connectors\n\tsource, err := NewSource(sourceCfg.NodeType)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create source connector: %w\", err)\n\t}\n\tsink := NewClickHouseSink(r.gateway, r.db, r.cfg.AppSecretKey)\n\n\t// Validate configs\n\tif err := source.Validate(sourceCfg); err != nil {\n\t\treturn fmt.Errorf(\"validate source config: %w\", err)\n\t}\n\tif err := sink.Validate(sinkCfg); err != nil {\n\t\treturn fmt.Errorf(\"validate sink config: %w\", err)\n\t}\n\n\t// Create run record\n\trunID, err := r.db.CreatePipelineRun(pipelineID, \"running\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create pipeline run: %w\", err)\n\t}\n\n\t// Update pipeline status\n\tr.db.UpdatePipelineStatus(pipelineID, \"running\", \"\")\n\n\t// Launch goroutine\n\tctx, cancel := context.WithCancel(context.Background())\n\tmetrics := &Metrics{}\n\tdone := make(chan struct{})\n\n\trp := &RunningPipeline{\n\t\tPipelineID: pipelineID,\n\t\tRunID:      runID,\n\t\tCancel:     cancel,\n\t\tMetrics:    metrics,\n\t\tStartedAt:  time.Now(),\n\t\tDone:       done,\n\t}\n\n\tr.mu.Lock()\n\tr.pipelines[pipelineID] = rp\n\tr.mu.Unlock()\n\n\tgo r.runPipeline(ctx, rp, source, sink, sourceCfg, sinkCfg, pipeline.ConnectionID)\n\n\tr.db.CreatePipelineRunLog(runID, \"info\", \"Pipeline started\")\n\tslog.Info(\"Pipeline started\", \"pipeline\", pipelineID, \"source\", sourceCfg.NodeType, \"run\", runID)\n\treturn nil\n}\n\n// StopPipeline stops a running pipeline.\nfunc (r *Runner) StopPipeline(pipelineID string) error 
{\n\tr.mu.RLock()\n\trp, exists := r.pipelines[pipelineID]\n\tr.mu.RUnlock()\n\n\tif !exists {\n\t\t// Pipeline might not be running in-memory, just update DB status\n\t\tr.db.UpdatePipelineStatus(pipelineID, \"stopped\", \"\")\n\t\treturn nil\n\t}\n\n\trp.Cancel()\n\n\t// Wait for goroutine to finish with timeout\n\tselect {\n\tcase <-rp.Done:\n\tcase <-time.After(15 * time.Second):\n\t\tslog.Warn(\"Timeout waiting for pipeline to stop\", \"pipeline\", pipelineID)\n\t}\n\n\treturn nil\n}\n\n// GetRunningMetrics returns metrics for a running pipeline.\nfunc (r *Runner) GetRunningMetrics(pipelineID string) *Metrics {\n\tr.mu.RLock()\n\trp, exists := r.pipelines[pipelineID]\n\tr.mu.RUnlock()\n\tif !exists {\n\t\treturn nil\n\t}\n\treturn rp.Metrics\n}\n\n// runPipeline is the main execution loop for a single pipeline.\nfunc (r *Runner) runPipeline(ctx context.Context, rp *RunningPipeline, source SourceConnector, sink SinkConnector, sourceCfg, sinkCfg ConnectorConfig, connectionID string) {\n\tdefer close(rp.Done)\n\tdefer func() {\n\t\tr.mu.Lock()\n\t\tdelete(r.pipelines, rp.PipelineID)\n\t\tr.mu.Unlock()\n\t}()\n\n\tbatchCh := make(chan Batch, 10)\n\tvar sourceErr error\n\n\t// Start source in a goroutine\n\tgo func() {\n\t\tsourceErr = source.Start(ctx, sourceCfg, batchCh)\n\t\tclose(batchCh)\n\t}()\n\n\t// Consume batches and write to sink\n\tfor batch := range batchCh {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\tgoto done\n\t\tdefault:\n\t\t}\n\n\t\trows, err := sink.WriteBatch(ctx, sinkCfg, batch)\n\t\tif err != nil {\n\t\t\trp.Metrics.ErrorsCount.Add(1)\n\t\t\tr.db.CreatePipelineRunLog(rp.RunID, \"error\", fmt.Sprintf(\"Write batch failed: %v\", err))\n\t\t\tslog.Error(\"Pipeline batch write failed\", \"pipeline\", rp.PipelineID, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\trp.Metrics.RowsIngested.Add(int64(rows))\n\t\trp.Metrics.BatchesSent.Add(1)\n\t\trp.Metrics.LastBatchAt.Store(time.Now())\n\n\t\t// Estimate bytes from raw JSON\n\t\tfor _, rec := range 
batch.Records {\n\t\t\trp.Metrics.BytesIngested.Add(int64(len(rec.RawJSON)))\n\t\t}\n\t}\n\ndone:\n\t// Finalize\n\tstatus := \"success\"\n\terrMsg := \"\"\n\tif sourceErr != nil && ctx.Err() == nil {\n\t\tstatus = \"error\"\n\t\terrMsg = sourceErr.Error()\n\t} else if ctx.Err() != nil {\n\t\tstatus = \"stopped\"\n\t}\n\n\tr.db.UpdatePipelineRun(\n\t\trp.RunID, status,\n\t\trp.Metrics.RowsIngested.Load(),\n\t\trp.Metrics.BytesIngested.Load(),\n\t\trp.Metrics.ErrorsCount.Load(),\n\t\terrMsg, \"{}\",\n\t)\n\tr.db.UpdatePipelineStatus(rp.PipelineID, status, errMsg)\n\tr.db.CreatePipelineRunLog(rp.RunID, \"info\", fmt.Sprintf(\"Pipeline %s (rows: %d, errors: %d)\", status, rp.Metrics.RowsIngested.Load(), rp.Metrics.ErrorsCount.Load()))\n\n\tslog.Info(\"Pipeline finished\", \"pipeline\", rp.PipelineID, \"status\", status, \"rows\", rp.Metrics.RowsIngested.Load())\n}\n\n// Helper functions\n\nfunc isSourceType(nodeType string) bool {\n\tswitch nodeType {\n\tcase \"source_kafka\", \"source_webhook\", \"source_database\", \"source_s3\":\n\t\treturn true\n\t}\n\treturn false\n}\n\nfunc parseNodeConfig(node *database.PipelineNode) (ConnectorConfig, error) {\n\tvar fields map[string]interface{}\n\tif err := json.Unmarshal([]byte(node.ConfigEncrypted), &fields); err != nil {\n\t\treturn ConnectorConfig{}, fmt.Errorf(\"unmarshal node config: %w\", err)\n\t}\n\treturn ConnectorConfig{\n\t\tNodeType: node.NodeType,\n\t\tFields:   fields,\n\t}, nil\n}\n"
  },
  {
    "path": "internal/pipelines/s3_source.go",
    "content": "package pipelines\n\nimport (\n\t\"bufio\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"log/slog\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/minio/minio-go/v7\"\n\t\"github.com/minio/minio-go/v7/pkg/credentials\"\n)\n\n// S3Source polls an S3-compatible bucket for new files and parses them.\ntype S3Source struct{}\n\nfunc (s *S3Source) Type() string { return \"source_s3\" }\n\n// Validate checks S3 configuration.\nfunc (s *S3Source) Validate(cfg ConnectorConfig) error {\n\tendpoint := stringField(cfg.Fields, \"endpoint\", \"\")\n\tbucket := stringField(cfg.Fields, \"bucket\", \"\")\n\taccessKey := stringField(cfg.Fields, \"access_key\", \"\")\n\tsecretKey := stringField(cfg.Fields, \"secret_key\", \"\")\n\n\tif endpoint == \"\" {\n\t\treturn fmt.Errorf(\"endpoint is required\")\n\t}\n\tif bucket == \"\" {\n\t\treturn fmt.Errorf(\"bucket is required\")\n\t}\n\tif accessKey == \"\" {\n\t\treturn fmt.Errorf(\"access_key is required\")\n\t}\n\tif secretKey == \"\" {\n\t\treturn fmt.Errorf(\"secret_key is required\")\n\t}\n\treturn nil\n}\n\n// Start begins polling S3 for new files and sends batches to the output channel.\nfunc (s *S3Source) Start(ctx context.Context, cfg ConnectorConfig, out chan<- Batch) error {\n\tendpoint := stringField(cfg.Fields, \"endpoint\", \"\")\n\tbucket := stringField(cfg.Fields, \"bucket\", \"\")\n\tprefix := stringField(cfg.Fields, \"prefix\", \"\")\n\taccessKey := stringField(cfg.Fields, \"access_key\", \"\")\n\tsecretKey := stringField(cfg.Fields, \"secret_key\", \"\")\n\tregion := stringField(cfg.Fields, \"region\", \"us-east-1\")\n\tformat := stringField(cfg.Fields, \"format\", \"json\")\n\tpollIntervalSec := intField(cfg.Fields, \"poll_interval\", 300)\n\tuseSSL := boolField(cfg.Fields, \"use_ssl\", true)\n\tbatchSize := intField(cfg.Fields, \"batch_size\", 1000)\n\n\tclient, err := minio.New(endpoint, &minio.Options{\n\t\tCreds:  credentials.NewStaticV4(accessKey, secretKey, 
\"\"),\n\t\tSecure: useSSL,\n\t\tRegion: region,\n\t})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"create S3 client: %w\", err)\n\t}\n\n\tslog.Info(\"S3 source started\", \"endpoint\", endpoint, \"bucket\", bucket, \"prefix\", prefix, \"format\", format)\n\n\t// Track processed files to avoid reprocessing\n\tvar processed sync.Map\n\n\tticker := time.NewTicker(time.Duration(pollIntervalSec) * time.Second)\n\tdefer ticker.Stop()\n\n\tpoll := func() error {\n\t\tobjectCh := client.ListObjects(ctx, bucket, minio.ListObjectsOptions{\n\t\t\tPrefix:    prefix,\n\t\t\tRecursive: true,\n\t\t})\n\n\t\tfor obj := range objectCh {\n\t\t\tif obj.Err != nil {\n\t\t\t\tslog.Warn(\"S3 list error\", \"error\", obj.Err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Skip already processed\n\t\t\tif _, seen := processed.LoadOrStore(obj.Key, true); seen {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif err := s.processFile(ctx, client, bucket, obj.Key, format, batchSize, out); err != nil {\n\t\t\t\tslog.Error(\"S3 file processing error\", \"key\", obj.Key, \"error\", err)\n\t\t\t\tprocessed.Delete(obj.Key) // Allow retry\n\t\t\t}\n\t\t}\n\n\t\treturn nil\n\t}\n\n\t// First poll\n\tif err := poll(); err != nil {\n\t\tslog.Error(\"S3 source poll error\", \"error\", err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn nil\n\t\tcase <-ticker.C:\n\t\t\tif err := poll(); err != nil {\n\t\t\t\tslog.Error(\"S3 source poll error\", \"error\", err)\n\t\t\t}\n\t\t}\n\t}\n}\n\n// processFile reads and parses a single S3 object.\nfunc (s *S3Source) processFile(ctx context.Context, client *minio.Client, bucket, key, format string, batchSize int, out chan<- Batch) error {\n\tobj, err := client.GetObject(ctx, bucket, key, minio.GetObjectOptions{})\n\tif err != nil {\n\t\treturn fmt.Errorf(\"get object: %w\", err)\n\t}\n\tdefer obj.Close()\n\n\tslog.Info(\"Processing S3 file\", \"key\", key, \"format\", format)\n\n\tswitch strings.ToLower(format) {\n\tcase \"json\", \"ndjson\", 
\"jsonl\":\n\t\treturn s.parseNDJSON(ctx, obj, batchSize, out)\n\tcase \"csv\":\n\t\treturn s.parseCSV(ctx, obj, batchSize, out)\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported format: %s\", format)\n\t}\n}\n\n// parseNDJSON reads newline-delimited JSON.\nfunc (s *S3Source) parseNDJSON(ctx context.Context, r io.Reader, batchSize int, out chan<- Batch) error {\n\tscanner := bufio.NewScanner(r)\n\tscanner.Buffer(make([]byte, 1024*1024), 10*1024*1024) // 10MB max line\n\n\tvar buf []Record\n\tfor scanner.Scan() {\n\t\tif ctx.Err() != nil {\n\t\t\treturn nil\n\t\t}\n\t\tline := strings.TrimSpace(scanner.Text())\n\t\tif line == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar data map[string]interface{}\n\t\tif err := json.Unmarshal([]byte(line), &data); err != nil {\n\t\t\tslog.Warn(\"S3 JSON parse error, skipping line\", \"error\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\tbuf = append(buf, Record{\n\t\t\tData:    data,\n\t\t\tRawJSON: []byte(line),\n\t\t})\n\n\t\tif len(buf) >= batchSize {\n\t\t\tselect {\n\t\t\tcase out <- Batch{Records: buf, SourceTS: time.Now()}:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tbuf = nil\n\t\t}\n\t}\n\n\tif len(buf) > 0 {\n\t\tselect {\n\t\tcase out <- Batch{Records: buf, SourceTS: time.Now()}:\n\t\tcase <-ctx.Done():\n\t\t}\n\t}\n\n\treturn scanner.Err()\n}\n\n// parseCSV reads CSV files (first row = headers).\nfunc (s *S3Source) parseCSV(ctx context.Context, r io.Reader, batchSize int, out chan<- Batch) error {\n\tscanner := bufio.NewScanner(r)\n\tscanner.Buffer(make([]byte, 1024*1024), 10*1024*1024)\n\n\t// Read header\n\tif !scanner.Scan() {\n\t\treturn fmt.Errorf(\"empty CSV file\")\n\t}\n\theaders := strings.Split(scanner.Text(), \",\")\n\tfor i := range headers {\n\t\theaders[i] = strings.TrimSpace(headers[i])\n\t}\n\n\tvar buf []Record\n\tfor scanner.Scan() {\n\t\tif ctx.Err() != nil {\n\t\t\treturn nil\n\t\t}\n\t\tline := scanner.Text()\n\t\tif strings.TrimSpace(line) == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tvalues := 
strings.Split(line, \",\")\n\t\tdata := make(map[string]interface{})\n\t\tfor i, h := range headers {\n\t\t\tif i < len(values) {\n\t\t\t\tdata[h] = strings.TrimSpace(values[i])\n\t\t\t} else {\n\t\t\t\tdata[h] = \"\"\n\t\t\t}\n\t\t}\n\n\t\traw, _ := json.Marshal(data)\n\t\tbuf = append(buf, Record{\n\t\t\tData:    data,\n\t\t\tRawJSON: raw,\n\t\t})\n\n\t\tif len(buf) >= batchSize {\n\t\t\tselect {\n\t\t\tcase out <- Batch{Records: buf, SourceTS: time.Now()}:\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tbuf = nil\n\t\t}\n\t}\n\n\tif len(buf) > 0 {\n\t\tselect {\n\t\tcase out <- Batch{Records: buf, SourceTS: time.Now()}:\n\t\tcase <-ctx.Done():\n\t\t}\n\t}\n\n\treturn scanner.Err()\n}\n"
  },
  {
    "path": "internal/pipelines/types.go",
    "content": "package pipelines\n\nimport (\n\t\"context\"\n\t\"sync/atomic\"\n\t\"time\"\n)\n\n// Record represents a single data record flowing through the pipeline.\ntype Record struct {\n\tData    map[string]interface{} // Column name -> value\n\tRawJSON []byte                 // Original bytes for pass-through\n}\n\n// Batch is a slice of records ready for INSERT.\ntype Batch struct {\n\tRecords  []Record\n\tSourceTS time.Time\n}\n\n// ConnectorConfig is the parsed config for a connector node.\ntype ConnectorConfig struct {\n\tNodeType string                 `json:\"node_type\"`\n\tFields   map[string]interface{} `json:\"fields\"`\n}\n\n// SourceConnector is the interface all source connectors implement.\ntype SourceConnector interface {\n\t// Validate checks configuration before pipeline start.\n\tValidate(cfg ConnectorConfig) error\n\n\t// Start begins reading data. It sends batches to the output channel.\n\t// It blocks until ctx is cancelled or an unrecoverable error occurs.\n\tStart(ctx context.Context, cfg ConnectorConfig, out chan<- Batch) error\n\n\t// Type returns the connector type identifier.\n\tType() string\n}\n\n// SinkConnector writes batches to the destination.\ntype SinkConnector interface {\n\tValidate(cfg ConnectorConfig) error\n\tWriteBatch(ctx context.Context, cfg ConnectorConfig, batch Batch) (rowsWritten int, err error)\n\tType() string\n}\n\n// Metrics tracks pipeline execution metrics (thread-safe via atomic).\ntype Metrics struct {\n\tRowsIngested  atomic.Int64\n\tBytesIngested atomic.Int64\n\tBatchesSent   atomic.Int64\n\tErrorsCount   atomic.Int64\n\tLastBatchAt   atomic.Value // time.Time\n}\n"
  },
  {
    "path": "internal/pipelines/webhook.go",
    "content": "package pipelines\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n)\n\n// WebhookSource receives data via HTTP POST requests.\n// When started, it registers itself in a global registry so that an\n// external HTTP handler can route incoming requests to the right pipeline.\ntype WebhookSource struct{}\n\nfunc (w *WebhookSource) Type() string { return \"source_webhook\" }\n\n// Validate checks webhook configuration.\nfunc (w *WebhookSource) Validate(cfg ConnectorConfig) error {\n\t// auth_token is optional — when empty, the webhook accepts all requests\n\treturn nil\n}\n\n// Start blocks until the context is cancelled, forwarding received batches to out.\nfunc (w *WebhookSource) Start(ctx context.Context, cfg ConnectorConfig, out chan<- Batch) error {\n\tpipelineID, _ := cfg.Fields[\"pipeline_id\"].(string)\n\tauthToken, _ := cfg.Fields[\"auth_token\"].(string)\n\tbatchSize := intField(cfg.Fields, \"batch_size\", 100)\n\tbatchTimeoutMs := intField(cfg.Fields, \"batch_timeout_ms\", 5000)\n\n\t// Create a receiver for this pipeline\n\trecv := &webhookReceiver{\n\t\tauthToken: authToken,\n\t\tincoming:  make(chan Record, 1000),\n\t}\n\n\t// Register in global registry\n\twebhookRegistry.Store(pipelineID, recv)\n\tdefer webhookRegistry.Delete(pipelineID)\n\n\tslog.Info(\"Webhook source started\", \"pipeline\", pipelineID)\n\n\t// Batch accumulation loop\n\tvar buf []Record\n\tticker := time.NewTicker(time.Duration(batchTimeoutMs) * time.Millisecond)\n\tdefer ticker.Stop()\n\n\tflush := func() {\n\t\tif len(buf) == 0 {\n\t\t\treturn\n\t\t}\n\t\tbatch := Batch{\n\t\t\tRecords:  buf,\n\t\t\tSourceTS: time.Now(),\n\t\t}\n\t\tselect {\n\t\tcase out <- batch:\n\t\tcase <-ctx.Done():\n\t\t\treturn\n\t\t}\n\t\tbuf = nil\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\tflush()\n\t\t\treturn nil\n\t\tcase rec := <-recv.incoming:\n\t\t\tbuf = append(buf, 
rec)\n\t\t\tif len(buf) >= batchSize {\n\t\t\t\tflush()\n\t\t\t}\n\t\tcase <-ticker.C:\n\t\t\tflush()\n\t\t}\n\t}\n}\n\n// ── Webhook HTTP integration ───────────────────────────────────────\n\n// webhookRegistry maps pipeline IDs to active webhook receivers.\nvar webhookRegistry sync.Map\n\n// webhookReceiver holds the channel for a single pipeline's webhook endpoint.\ntype webhookReceiver struct {\n\tauthToken string\n\tincoming  chan Record\n}\n\n// HandleWebhook is an HTTP handler that routes incoming webhook POSTs to the\n// correct running pipeline. Mount at: POST /api/pipelines/webhook/{pipelineID}\nfunc HandleWebhook(w http.ResponseWriter, r *http.Request) {\n\t// Extract pipeline ID from URL path (last segment)\n\tparts := strings.Split(strings.TrimRight(r.URL.Path, \"/\"), \"/\")\n\tif len(parts) == 0 {\n\t\thttp.Error(w, \"missing pipeline ID\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tpipelineID := parts[len(parts)-1]\n\n\tval, ok := webhookRegistry.Load(pipelineID)\n\tif !ok {\n\t\thttp.Error(w, \"pipeline not running or not a webhook pipeline\", http.StatusNotFound)\n\t\treturn\n\t}\n\trecv := val.(*webhookReceiver)\n\n\t// Authenticate (skip if no auth token configured)\n\tif recv.authToken != \"\" {\n\t\ttoken := r.Header.Get(\"Authorization\")\n\t\ttoken = strings.TrimPrefix(token, \"Bearer \")\n\t\tif token == \"\" {\n\t\t\ttoken = r.URL.Query().Get(\"token\")\n\t\t}\n\t\tif token != recv.authToken {\n\t\t\thttp.Error(w, \"unauthorized\", http.StatusUnauthorized)\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Read body\n\tbody, err := io.ReadAll(io.LimitReader(r.Body, 10*1024*1024)) // 10MB limit\n\tif err != nil {\n\t\thttp.Error(w, \"failed to read body\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tcontentType := r.Header.Get(\"Content-Type\")\n\trecords, parseErr := parseWebhookBody(body, contentType)\n\tif parseErr != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"parse error: %v\", parseErr), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\t// Send records to 
pipeline (non-blocking with backpressure)\n\taccepted := 0\n\tfor _, rec := range records {\n\t\tselect {\n\t\tcase recv.incoming <- rec:\n\t\t\taccepted++\n\t\tdefault:\n\t\t\t// Channel full, apply backpressure\n\t\t\thttp.Error(w, \"pipeline buffer full, try again later\", http.StatusTooManyRequests)\n\t\t\treturn\n\t\t}\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\tfmt.Fprintf(w, `{\"accepted\":%d}`, accepted)\n}\n\n// parseWebhookBody parses JSON or NDJSON into records.\nfunc parseWebhookBody(body []byte, contentType string) ([]Record, error) {\n\ttrimmed := strings.TrimSpace(string(body))\n\tif trimmed == \"\" {\n\t\treturn nil, fmt.Errorf(\"empty body\")\n\t}\n\n\t// Try to detect if it's an array or single object\n\tif strings.HasPrefix(trimmed, \"[\") {\n\t\t// JSON array\n\t\tvar arr []json.RawMessage\n\t\tif err := json.Unmarshal(body, &arr); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"invalid JSON array: %w\", err)\n\t\t}\n\t\tvar records []Record\n\t\tfor _, raw := range arr {\n\t\t\tvar data map[string]interface{}\n\t\t\tif err := json.Unmarshal(raw, &data); err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"invalid JSON object in array: %w\", err)\n\t\t\t}\n\t\t\trecords = append(records, Record{\n\t\t\t\tData:    data,\n\t\t\t\tRawJSON: raw,\n\t\t\t})\n\t\t}\n\t\treturn records, nil\n\t}\n\n\t// NDJSON or single object\n\tlines := strings.Split(trimmed, \"\\n\")\n\tvar records []Record\n\tfor _, line := range lines {\n\t\tline = strings.TrimSpace(line)\n\t\tif line == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tvar data map[string]interface{}\n\t\tif err := json.Unmarshal([]byte(line), &data); err != nil {\n\t\t\treturn nil, fmt.Errorf(\"invalid JSON line: %w\", err)\n\t\t}\n\t\trecords = append(records, Record{\n\t\t\tData:    data,\n\t\t\tRawJSON: []byte(line),\n\t\t})\n\t}\n\n\treturn records, nil\n}\n"
  },
  {
    "path": "internal/queryproc/variables.go",
    "content": "package queryproc\n\nimport (\n\t\"fmt\"\n\t\"math\"\n\t\"regexp\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n// TimeRange represents a dashboard time range selection.\ntype TimeRange struct {\n\tType string `json:\"type\"` // \"relative\" or \"absolute\"\n\tFrom string `json:\"from\"`\n\tTo   string `json:\"to\"`\n}\n\n// ProcessorOptions contains all inputs for query variable interpolation.\ntype ProcessorOptions struct {\n\tQuery         string\n\tTimeRange     *TimeRange\n\tTimeField     string\n\tTimeFieldUnit string // \"ns\", \"us\", \"ms\", \"s\" - defaults to \"ms\"\n\tMaxDataPoints int    // defaults to 1000\n\tTable         string\n}\n\n// ProcessedResult contains the output of query variable interpolation.\ntype ProcessedResult struct {\n\tQuery            string         `json:\"query\"`\n\tHasTimeVariables bool           `json:\"has_time_variables\"`\n\tInterpolatedVars map[string]any `json:\"interpolated_vars\"`\n\tErrors           []string       `json:\"errors\"`\n}\n\n// Variable patterns for detection and replacement.\nvar (\n\tpatTimeFilterWithCol = regexp.MustCompile(`(?i)\\$__timeFilter\\(([^)]+)\\)`)\n\tpatTimeFilterSimple  = regexp.MustCompile(`(?i)\\$__timeFilter`)\n\tpatTimestampFilter   = regexp.MustCompile(`(?i)\\$__timestamp\\(([^)]+)\\)`)\n\tpatTimeField         = regexp.MustCompile(`(?i)\\$__timeField`)\n\tpatTimeFrom          = regexp.MustCompile(`(?i)\\$__timeFrom`)\n\tpatTimeTo            = regexp.MustCompile(`(?i)\\$__timeTo`)\n\tpatInterval          = regexp.MustCompile(`(?i)\\$__interval`)\n\tpatTable             = regexp.MustCompile(`(?i)\\$__table`)\n\n\t// Pattern for parsing relative time strings like \"5m\", \"1h\", \"7d\", \"1M\", \"1y\",\n\t// and Grafana-style forms like \"now-5m\" or \"now-15min\".\n\tpatRelativeTime = regexp.MustCompile(`^(?:now-)?\\s*(\\d+)\\s*([a-zA-Z]+)$`)\n)\n\n// HasTimeVariables checks if the query contains any supported time or table variables.\nfunc HasTimeVariables(query 
string) bool {\n\tif query == \"\" {\n\t\treturn false\n\t}\n\treturn patTimeFilterWithCol.MatchString(query) ||\n\t\tpatTimeFilterSimple.MatchString(query) ||\n\t\tpatTimestampFilter.MatchString(query) ||\n\t\tpatTimeField.MatchString(query) ||\n\t\tpatTimeFrom.MatchString(query) ||\n\t\tpatTimeTo.MatchString(query) ||\n\t\tpatInterval.MatchString(query) ||\n\t\tpatTable.MatchString(query)\n}\n\n// ProcessQueryVariables interpolates all dashboard variables in the given query.\nfunc ProcessQueryVariables(opts ProcessorOptions) ProcessedResult {\n\tif opts.Query == \"\" {\n\t\treturn ProcessedResult{\n\t\t\tQuery:            \"\",\n\t\t\tHasTimeVariables: false,\n\t\t\tInterpolatedVars: map[string]any{},\n\t\t\tErrors:           []string{\"Invalid query provided\"},\n\t\t}\n\t}\n\n\t// Apply defaults.\n\ttimeFieldUnit := opts.TimeFieldUnit\n\tif timeFieldUnit == \"\" {\n\t\ttimeFieldUnit = \"ms\"\n\t}\n\tmaxDataPoints := opts.MaxDataPoints\n\tif maxDataPoints <= 0 {\n\t\tmaxDataPoints = 1000\n\t}\n\n\tprocessedQuery := opts.Query\n\tinterpolatedVars := map[string]any{}\n\tvar errors []string\n\n\t// 1. Handle $__table replacement.\n\tif patTable.MatchString(processedQuery) {\n\t\tif opts.Table != \"\" {\n\t\t\tprocessedQuery = patTable.ReplaceAllString(processedQuery, opts.Table)\n\t\t\tinterpolatedVars[\"table\"] = opts.Table\n\t\t} else {\n\t\t\terrors = append(errors, \"$__table variable used but no table specified\")\n\t\t}\n\t}\n\n\t// 2. Handle $__timeField replacement.\n\tif patTimeField.MatchString(processedQuery) {\n\t\teffectiveTimeField := opts.TimeField\n\t\tif effectiveTimeField == \"\" {\n\t\t\teffectiveTimeField = \"timestamp\"\n\t\t}\n\t\tprocessedQuery = patTimeField.ReplaceAllString(processedQuery, effectiveTimeField)\n\t\tinterpolatedVars[\"timeField\"] = effectiveTimeField\n\t}\n\n\t// 3. Handle time-range-related variables.\n\tif opts.TimeRange != nil {\n\t\tfrom, to, ok := getTimeBounds(opts.TimeRange)\n\t\tif ok {\n\t\t\t// 3a. 
Handle $__timeFilter(column) with column name in parentheses.\n\t\t\tif matches := patTimeFilterWithCol.FindStringSubmatch(processedQuery); matches != nil {\n\t\t\t\tcolumnName := strings.TrimSpace(matches[1])\n\t\t\t\ttimeFilter := generateTimeFilter(from, to, columnName, timeFieldUnit)\n\t\t\t\tprocessedQuery = patTimeFilterWithCol.ReplaceAllString(processedQuery, timeFilter)\n\t\t\t\tinterpolatedVars[\"timeFilter\"] = timeFilter\n\t\t\t\tinterpolatedVars[\"timeFilterColumn\"] = columnName\n\t\t\t}\n\n\t\t\t// 3b. Handle $__timeFilter without column (uses timeField or \"timestamp\").\n\t\t\t// After the column variant is already replaced, only bare $__timeFilter remain.\n\t\t\tif patTimeFilterSimple.MatchString(processedQuery) {\n\t\t\t\teffectiveTimeField := opts.TimeField\n\t\t\t\tif effectiveTimeField == \"\" {\n\t\t\t\t\teffectiveTimeField = \"timestamp\"\n\t\t\t\t}\n\t\t\t\ttimeFilter := generateTimeFilter(from, to, effectiveTimeField, timeFieldUnit)\n\t\t\t\tprocessedQuery = patTimeFilterSimple.ReplaceAllString(processedQuery, timeFilter)\n\t\t\t\tinterpolatedVars[\"timeFilter\"] = timeFilter\n\t\t\t}\n\n\t\t\t// 3c. Handle $__timeFrom.\n\t\t\tif patTimeFrom.MatchString(processedQuery) {\n\t\t\t\tfromValue := convertToEpoch(from, timeFieldUnit)\n\t\t\t\tprocessedQuery = patTimeFrom.ReplaceAllString(processedQuery, strconv.FormatInt(fromValue, 10))\n\t\t\t\tinterpolatedVars[\"timeFrom\"] = fromValue\n\t\t\t}\n\n\t\t\t// 3d. Handle $__timeTo.\n\t\t\tif patTimeTo.MatchString(processedQuery) {\n\t\t\t\ttoValue := convertToEpoch(to, timeFieldUnit)\n\t\t\t\tprocessedQuery = patTimeTo.ReplaceAllString(processedQuery, strconv.FormatInt(toValue, 10))\n\t\t\t\tinterpolatedVars[\"timeTo\"] = toValue\n\t\t\t}\n\n\t\t\t// 3e. 
Handle $__interval.\n\t\t\tif patInterval.MatchString(processedQuery) {\n\t\t\t\tintervalSeconds := calculateInterval(from, to, maxDataPoints)\n\t\t\t\tprocessedQuery = patInterval.ReplaceAllString(processedQuery, strconv.Itoa(intervalSeconds))\n\t\t\t\tinterpolatedVars[\"interval\"] = intervalSeconds\n\t\t\t}\n\n\t\t\t// 3f. Handle $__timestamp(column) as a DateTime range predicate.\n\t\t\tif patTimestampFilter.MatchString(processedQuery) {\n\t\t\t\tfromSec := from.Unix()\n\t\t\t\ttoSec := to.Unix()\n\t\t\t\tprocessedQuery = patTimestampFilter.ReplaceAllStringFunc(processedQuery, func(match string) string {\n\t\t\t\t\tmatches := patTimestampFilter.FindStringSubmatch(match)\n\t\t\t\t\tif len(matches) < 2 {\n\t\t\t\t\t\treturn match\n\t\t\t\t\t}\n\t\t\t\t\tcolumnName := strings.TrimSpace(matches[1])\n\t\t\t\t\treturn fmt.Sprintf(\"(%s >= toDateTime(%d) AND %s <= toDateTime(%d))\", columnName, fromSec, columnName, toSec)\n\t\t\t\t})\n\t\t\t\tinterpolatedVars[\"timestampFrom\"] = fromSec\n\t\t\t\tinterpolatedVars[\"timestampTo\"] = toSec\n\t\t\t}\n\t\t} else {\n\t\t\terrors = append(errors, \"Invalid time range provided\")\n\t\t}\n\t} else if patTimeFilterWithCol.MatchString(processedQuery) ||\n\t\tpatTimeFilterSimple.MatchString(processedQuery) ||\n\t\tpatTimeFrom.MatchString(processedQuery) ||\n\t\tpatTimeTo.MatchString(processedQuery) ||\n\t\tpatInterval.MatchString(processedQuery) ||\n\t\tpatTimestampFilter.MatchString(processedQuery) {\n\t\terrors = append(errors, \"Time-range variables found but no time range was provided\")\n\t}\n\n\treturn ProcessedResult{\n\t\tQuery:            processedQuery,\n\t\tHasTimeVariables: HasTimeVariables(opts.Query),\n\t\tInterpolatedVars: interpolatedVars,\n\t\tErrors:           errors,\n\t}\n}\n\n// InferTimeUnit infers the time unit from a column name suffix.\n// Returns \"ns\", \"us\", \"ms\", or \"s\". 
Defaults to \"ms\".\nfunc InferTimeUnit(columnName string) string {\n\tlower := strings.ToLower(columnName)\n\n\tif strings.HasSuffix(lower, \"_ns\") || strings.Contains(lower, \"_ns_\") {\n\t\treturn \"ns\"\n\t}\n\tif strings.HasSuffix(lower, \"_us\") || strings.Contains(lower, \"_us_\") {\n\t\treturn \"us\"\n\t}\n\tif strings.HasSuffix(lower, \"_ms\") || strings.Contains(lower, \"_ms_\") {\n\t\treturn \"ms\"\n\t}\n\tif strings.HasSuffix(lower, \"_s\") && !strings.HasSuffix(lower, \"_ms\") {\n\t\treturn \"s\"\n\t}\n\n\treturn \"ms\"\n}\n\n// getTimeBounds parses a TimeRange into concrete from/to time.Time values.\nfunc getTimeBounds(tr *TimeRange) (from, to time.Time, ok bool) {\n\tnow := time.Now()\n\n\tif tr.Type == \"relative\" {\n\t\ttoToken := strings.TrimSpace(tr.To)\n\t\tif toToken == \"\" {\n\t\t\ttoToken = \"now\"\n\t\t}\n\t\tfromToken := strings.TrimSpace(tr.From)\n\t\tif fromToken == \"\" {\n\t\t\tfromToken = \"1h\"\n\t\t}\n\n\t\tto = parseRelativeTime(toToken, now)\n\t\tfrom = parseRelativeTime(fromToken, now)\n\t} else {\n\t\tfrom = parseAbsoluteTime(tr.From)\n\t\tto = parseAbsoluteTime(tr.To)\n\t}\n\n\tif from.IsZero() || to.IsZero() {\n\t\treturn time.Time{}, time.Time{}, false\n\t}\n\n\tif from.After(to) {\n\t\tfrom, to = to, from\n\t}\n\n\treturn from, to, true\n}\n\n// parseRelativeTime parses a relative time string like \"5m\", \"1h\", \"7d\", \"1M\", \"1y\"\n// as an offset subtracted from the base time. 
\"now\" returns the base time unchanged.\nfunc parseRelativeTime(timeStr string, base time.Time) time.Time {\n\ttrimmed := strings.ToLower(strings.TrimSpace(timeStr))\n\tif trimmed == \"\" {\n\t\treturn base.Add(-5 * time.Minute)\n\t}\n\tif trimmed == \"now\" {\n\t\treturn base\n\t}\n\n\tmatches := patRelativeTime.FindStringSubmatch(trimmed)\n\tif matches == nil {\n\t\t// Default to 5 minutes ago on invalid input.\n\t\treturn base.Add(-5 * time.Minute)\n\t}\n\n\tvalue, _ := strconv.Atoi(matches[1])\n\tunit := matches[2]\n\tswitch unit {\n\tcase \"s\", \"sec\", \"secs\", \"second\", \"seconds\":\n\t\tunit = \"s\"\n\tcase \"m\", \"min\", \"mins\", \"minute\", \"minutes\":\n\t\tunit = \"m\"\n\tcase \"h\", \"hr\", \"hrs\", \"hour\", \"hours\":\n\t\tunit = \"h\"\n\tcase \"d\", \"day\", \"days\":\n\t\tunit = \"d\"\n\tcase \"w\", \"week\", \"weeks\":\n\t\tunit = \"w\"\n\tcase \"mo\", \"mon\", \"month\", \"months\", \"mth\":\n\t\tunit = \"M\"\n\tcase \"y\", \"yr\", \"yrs\", \"year\", \"years\":\n\t\tunit = \"y\"\n\t}\n\n\tswitch unit {\n\tcase \"s\":\n\t\treturn base.Add(-time.Duration(value) * time.Second)\n\tcase \"m\":\n\t\treturn base.Add(-time.Duration(value) * time.Minute)\n\tcase \"h\":\n\t\treturn base.Add(-time.Duration(value) * time.Hour)\n\tcase \"d\":\n\t\treturn base.Add(-time.Duration(value) * 24 * time.Hour)\n\tcase \"w\":\n\t\treturn base.Add(-time.Duration(value) * 7 * 24 * time.Hour)\n\tcase \"M\":\n\t\t// Approximate month as 30 days.\n\t\treturn base.Add(-time.Duration(value) * 30 * 24 * time.Hour)\n\tcase \"y\":\n\t\t// Approximate year as 365 days.\n\t\treturn base.Add(-time.Duration(value) * 365 * 24 * time.Hour)\n\tdefault:\n\t\treturn base.Add(-5 * time.Minute)\n\t}\n}\n\n// parseAbsoluteTime attempts to parse a time string in common formats.\nfunc parseAbsoluteTime(s string) time.Time {\n\t// Try RFC3339 first (most common for API payloads).\n\tif t, err := time.Parse(time.RFC3339, s); err == nil {\n\t\treturn t\n\t}\n\t// Try RFC3339Nano.\n\tif 
t, err := time.Parse(time.RFC3339Nano, s); err == nil {\n\t\treturn t\n\t}\n\t// Try date-only format.\n\tif t, err := time.Parse(\"2006-01-02\", s); err == nil {\n\t\treturn t\n\t}\n\t// Try datetime without timezone.\n\tif t, err := time.Parse(\"2006-01-02 15:04:05\", s); err == nil {\n\t\treturn t\n\t}\n\t// Try datetime-local without timezone offset.\n\tif t, err := time.Parse(\"2006-01-02T15:04\", s); err == nil {\n\t\treturn t\n\t}\n\tif t, err := time.Parse(\"2006-01-02T15:04:05\", s); err == nil {\n\t\treturn t\n\t}\n\treturn time.Time{}\n}\n\n// generateTimeFilter builds a SQL time filter condition for the given column.\nfunc generateTimeFilter(from, to time.Time, columnName, timeUnit string) string {\n\tfromValue := convertToEpoch(from, timeUnit)\n\ttoValue := convertToEpoch(to, timeUnit)\n\treturn fmt.Sprintf(\"%s >= %d AND %s <= %d\", columnName, fromValue, columnName, toValue)\n}\n\n// convertToEpoch converts a time.Time to an epoch value in the specified unit.\nfunc convertToEpoch(t time.Time, unit string) int64 {\n\tswitch unit {\n\tcase \"ns\":\n\t\treturn t.UnixNano()\n\tcase \"us\":\n\t\treturn t.UnixMicro()\n\tcase \"ms\":\n\t\treturn t.UnixMilli()\n\tcase \"s\":\n\t\treturn t.Unix()\n\tdefault:\n\t\treturn t.UnixMilli()\n\t}\n}\n\n// calculateInterval computes the aggregation interval in seconds for a given\n// time span and maximum number of data points.\nfunc calculateInterval(from, to time.Time, maxDataPoints int) int {\n\tdurationMs := to.Sub(from).Milliseconds()\n\tintervalMs := float64(durationMs) / float64(maxDataPoints)\n\tif intervalMs < 1000 {\n\t\tintervalMs = 1000\n\t}\n\treturn int(math.Floor(intervalMs / 1000))\n}\n"
  },
  {
    "path": "internal/queryproc/variables_test.go",
    "content": "package queryproc\n\nimport (\n\t\"strings\"\n\t\"testing\"\n\t\"time\"\n)\n\nfunc TestParseRelativeTime_NowMinusMinutes(t *testing.T) {\n\tbase := time.Date(2026, 2, 12, 12, 0, 0, 0, time.UTC)\n\n\tcases := []string{\"now-5m\", \"now-5min\", \"5m\", \"5minutes\"}\n\tfor _, tc := range cases {\n\t\tgot := parseRelativeTime(tc, base)\n\t\twant := base.Add(-5 * time.Minute)\n\t\tif got.Unix() != want.Unix() {\n\t\t\tt.Fatalf(\"%s: expected %v, got %v\", tc, want, got)\n\t\t}\n\t}\n}\n\nfunc TestGetTimeBounds_RelativeRangeWithCustomTo(t *testing.T) {\n\tfrom, to, ok := getTimeBounds(&TimeRange{\n\t\tType: \"relative\",\n\t\tFrom: \"now-15m\",\n\t\tTo:   \"now-5m\",\n\t})\n\tif !ok {\n\t\tt.Fatalf(\"expected valid range\")\n\t}\n\tif !from.Before(to) {\n\t\tt.Fatalf(\"expected from < to, got from=%v to=%v\", from, to)\n\t}\n}\n\nfunc TestProcessQueryVariables_TimestampMacroWithRelativeExpression(t *testing.T) {\n\tout := ProcessQueryVariables(ProcessorOptions{\n\t\tQuery: \"SELECT count() FROM x WHERE $__timestamp(event_time)\",\n\t\tTimeRange: &TimeRange{\n\t\t\tType: \"relative\",\n\t\t\tFrom: \"now-1h\",\n\t\t\tTo:   \"now\",\n\t\t},\n\t})\n\n\tif len(out.Errors) > 0 {\n\t\tt.Fatalf(\"unexpected errors: %+v\", out.Errors)\n\t}\n\tif strings.Contains(out.Query, \"$__timestamp\") {\n\t\tt.Fatalf(\"timestamp macro was not replaced: %s\", out.Query)\n\t}\n}\n"
  },
  {
    "path": "internal/scheduler/cron.go",
    "content": "package scheduler\n\nimport (\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n)\n\n// parseField parses a single cron field (e.g. \"*/5\", \"1-15\", \"1,5,10\", \"*\")\n// and returns a set of matching integer values within [min, max].\nfunc parseField(field string, min, max int) map[int]bool {\n\tvalues := make(map[int]bool)\n\tparts := strings.Split(field, \",\")\n\n\tfor _, part := range parts {\n\t\trangePart := part\n\t\tstep := 1\n\n\t\tif idx := strings.Index(part, \"/\"); idx >= 0 {\n\t\t\trangePart = part[:idx]\n\t\t\ts, err := strconv.Atoi(part[idx+1:])\n\t\t\tif err != nil || s <= 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tstep = s\n\t\t}\n\n\t\tswitch {\n\t\tcase rangePart == \"*\":\n\t\t\t// Every value from min to max, filtered by step.\n\t\t\tfor v := min; v <= max; v++ {\n\t\t\t\tif (v-min)%step == 0 {\n\t\t\t\t\tvalues[v] = true\n\t\t\t\t}\n\t\t\t}\n\n\t\tcase strings.Contains(rangePart, \"-\"):\n\t\t\tbounds := strings.SplitN(rangePart, \"-\", 2)\n\t\t\ts, err1 := strconv.Atoi(bounds[0])\n\t\t\te, err2 := strconv.Atoi(bounds[1])\n\t\t\tif err1 != nil || err2 != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tfor v := s; v <= e; v++ {\n\t\t\t\tif (v-s)%step == 0 {\n\t\t\t\t\tvalues[v] = true\n\t\t\t\t}\n\t\t\t}\n\n\t\tdefault:\n\t\t\tnum, err := strconv.Atoi(rangePart)\n\t\t\tif err == nil {\n\t\t\t\tvalues[num] = true\n\t\t\t}\n\t\t}\n\t}\n\n\treturn values\n}\n\n// ComputeNextRun parses a standard 5-field cron expression (minute hour dom month dow)\n// and returns the next matching UTC time after `from`, iterating minute by minute\n// up to 1 year ahead. 
Returns nil if no match is found.\nfunc ComputeNextRun(cron string, from time.Time) *time.Time {\n\tfields := strings.Fields(strings.TrimSpace(cron))\n\tif len(fields) != 5 {\n\t\treturn nil\n\t}\n\n\tminutes := parseField(fields[0], 0, 59)\n\thours := parseField(fields[1], 0, 23)\n\tdom := parseField(fields[2], 1, 31)\n\tmonths := parseField(fields[3], 1, 12)\n\tdow := parseField(fields[4], 0, 6)\n\n\tif len(minutes) == 0 || len(hours) == 0 || len(dom) == 0 || len(months) == 0 || len(dow) == 0 {\n\t\treturn nil\n\t}\n\n\t// Start from the next minute after `from`, truncated to the minute boundary.\n\tnext := from.UTC().Truncate(time.Minute).Add(time.Minute)\n\n\t// 525600 minutes = 1 year\n\tfor i := 0; i < 525600; i++ {\n\t\tm := next.Minute()\n\t\th := next.Hour()\n\t\td := next.Day()\n\t\tmo := int(next.Month())\n\t\tdw := int(next.Weekday()) // Sunday=0\n\n\t\tif minutes[m] && hours[h] && dom[d] && months[mo] && dow[dw] {\n\t\t\tresult := next\n\t\t\treturn &result\n\t\t}\n\n\t\tnext = next.Add(time.Minute)\n\t}\n\n\treturn nil\n}\n\n// ValidateCron returns true if the cron expression is syntactically valid\n// and can produce at least one future run time.\nfunc ValidateCron(cron string) bool {\n\treturn ComputeNextRun(cron, time.Now()) != nil\n}\n"
  },
  {
    "path": "internal/scheduler/runner.go",
    "content": "package scheduler\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/alerts\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\nconst (\n\ttickInterval  = 30 * time.Second\n\tmaxConcurrent = 3\n)\n\n// Runner executes due scheduled jobs on a 30-second tick interval.\ntype Runner struct {\n\tdb      *database.DB\n\tgateway *tunnel.Gateway\n\tsecret  string\n\tstopCh  chan struct{}\n}\n\n// NewRunner creates a new schedule runner.\nfunc NewRunner(db *database.DB, gw *tunnel.Gateway, secret string) *Runner {\n\treturn &Runner{\n\t\tdb:      db,\n\t\tgateway: gw,\n\t\tsecret:  secret,\n\t\tstopCh:  make(chan struct{}),\n\t}\n}\n\n// Start begins the runner goroutine that ticks every 30 seconds.\nfunc (r *Runner) Start() {\n\tgo func() {\n\t\tslog.Info(\"Schedule runner started\", \"interval\", tickInterval)\n\t\tticker := time.NewTicker(tickInterval)\n\t\tdefer ticker.Stop()\n\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-r.stopCh:\n\t\t\t\tslog.Info(\"Schedule runner stopped\")\n\t\t\t\treturn\n\t\t\tcase <-ticker.C:\n\t\t\t\tr.tick()\n\t\t\t}\n\t\t}\n\t}()\n}\n\n// Stop signals the runner goroutine to stop.\nfunc (r *Runner) Stop() {\n\tclose(r.stopCh)\n}\n\n// tick fetches due jobs from SQLite and executes them concurrently.\nfunc (r *Runner) tick() {\n\tschedules, err := r.db.GetEnabledSchedules()\n\tif err != nil {\n\t\tslog.Error(\"Failed to load enabled schedules\", \"error\", err)\n\t\treturn\n\t}\n\n\tnow := time.Now().UTC()\n\tvar due []database.Schedule\n\tfor _, s := range schedules {\n\t\tif s.NextRunAt == nil {\n\t\t\tcontinue\n\t\t}\n\t\tnextRun, err := time.Parse(time.RFC3339, *s.NextRunAt)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\tif nextRun.After(now) {\n\t\t\tcontinue\n\t\t}\n\t\tdue = append(due, s)\n\t}\n\n\tif len(due) == 0 
{\n\t\treturn\n\t}\n\n\tslog.Info(\"Processing due scheduled jobs\", \"count\", len(due))\n\n\tsem := make(chan struct{}, maxConcurrent)\n\tvar wg sync.WaitGroup\n\n\tfor _, schedule := range due {\n\t\twg.Add(1)\n\t\tsem <- struct{}{}\n\n\t\tgo func(s database.Schedule) {\n\t\t\tdefer wg.Done()\n\t\t\tdefer func() { <-sem }()\n\t\t\tr.runSchedule(s)\n\t\t}(schedule)\n\t}\n\n\twg.Wait()\n}\n\nfunc (r *Runner) runSchedule(schedule database.Schedule) {\n\t// Create a run record\n\trunID, err := r.db.CreateScheduleRun(schedule.ID, \"running\")\n\tif err != nil {\n\t\tslog.Error(\"Failed to create schedule run\", \"error\", err, \"schedule\", schedule.ID)\n\t}\n\n\tstart := time.Now()\n\tstatus := \"success\"\n\tvar runError string\n\trowCount := 0\n\tconnectionID := \"\"\n\n\tdefer func() {\n\t\telapsed := int(time.Since(start).Milliseconds())\n\n\t\t// Update run record\n\t\tif runID != \"\" {\n\t\t\tr.db.UpdateScheduleRun(runID, status, rowCount, elapsed, runError)\n\t\t}\n\n\t\t// Update schedule status\n\t\tvar nextRun *time.Time\n\t\tif schedule.Enabled {\n\t\t\tnextRun = ComputeNextRun(schedule.Cron, time.Now().UTC())\n\t\t}\n\t\tr.db.UpdateScheduleStatus(schedule.ID, status, runError, nextRun)\n\n\t\t// Audit log\n\t\tdetails := fmt.Sprintf(\"schedule=%s status=%s elapsed=%dms\", schedule.Name, status, elapsed)\n\t\tr.db.CreateAuditLog(database.AuditLogParams{\n\t\t\tAction:       \"schedule.run\",\n\t\t\tConnectionID: schedule.ConnectionID,\n\t\t\tDetails:      &details,\n\t\t})\n\n\t\tslog.Info(\"Scheduled job completed\",\n\t\t\t\"schedule\", schedule.ID,\n\t\t\t\"name\", schedule.Name,\n\t\t\t\"status\", status,\n\t\t\t\"elapsed_ms\", elapsed,\n\t\t)\n\n\t\tif status == \"error\" {\n\t\t\tfingerprint := fmt.Sprintf(\"schedule:%s:error\", schedule.ID)\n\t\t\tpayload := map[string]interface{}{\n\t\t\t\t\"schedule_id\":   schedule.ID,\n\t\t\t\t\"schedule_name\": schedule.Name,\n\t\t\t\t\"run_id\":        runID,\n\t\t\t\t\"elapsed_ms\":    
elapsed,\n\t\t\t\t\"error\":         runError,\n\t\t\t\t\"row_count\":     rowCount,\n\t\t\t}\n\t\t\tconnPtr := nullableConnectionID(connectionID)\n\t\t\tif _, alertErr := r.db.CreateAlertEvent(\n\t\t\t\tconnPtr,\n\t\t\t\talerts.EventTypeScheduleFailed,\n\t\t\t\talerts.SeverityError,\n\t\t\t\tfmt.Sprintf(\"Scheduled query failed: %s\", schedule.Name),\n\t\t\t\trunError,\n\t\t\t\tpayload,\n\t\t\t\tfingerprint,\n\t\t\t\trunID,\n\t\t\t); alertErr != nil {\n\t\t\t\tslog.Warn(\"Failed to create schedule failure alert event\", \"schedule\", schedule.ID, \"error\", alertErr)\n\t\t\t}\n\t\t} else if status == \"success\" {\n\t\t\tthreshold := int(float64(maxInt(schedule.TimeoutMs, 60000)) * 0.8)\n\t\t\tif threshold < 5000 {\n\t\t\t\tthreshold = 5000\n\t\t\t}\n\t\t\tif elapsed >= threshold {\n\t\t\t\tfingerprint := fmt.Sprintf(\"schedule:%s:slow\", schedule.ID)\n\t\t\t\tpayload := map[string]interface{}{\n\t\t\t\t\t\"schedule_id\":       schedule.ID,\n\t\t\t\t\t\"schedule_name\":     schedule.Name,\n\t\t\t\t\t\"run_id\":            runID,\n\t\t\t\t\t\"elapsed_ms\":        elapsed,\n\t\t\t\t\t\"slow_threshold_ms\": threshold,\n\t\t\t\t\t\"timeout_ms\":        schedule.TimeoutMs,\n\t\t\t\t\t\"row_count\":         rowCount,\n\t\t\t\t}\n\t\t\t\tconnPtr := nullableConnectionID(connectionID)\n\t\t\t\tif _, alertErr := r.db.CreateAlertEvent(\n\t\t\t\t\tconnPtr,\n\t\t\t\t\talerts.EventTypeScheduleSlow,\n\t\t\t\t\talerts.SeverityWarn,\n\t\t\t\t\tfmt.Sprintf(\"Scheduled query slow run: %s\", schedule.Name),\n\t\t\t\t\tfmt.Sprintf(\"Run took %dms (threshold %dms)\", elapsed, threshold),\n\t\t\t\t\tpayload,\n\t\t\t\t\tfingerprint,\n\t\t\t\t\trunID,\n\t\t\t\t); alertErr != nil {\n\t\t\t\t\tslog.Warn(\"Failed to create schedule slow alert event\", \"schedule\", schedule.ID, \"error\", alertErr)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}()\n\n\t// Fetch the saved query from SQLite\n\tsavedQuery, err := r.db.GetSavedQueryByID(schedule.SavedQueryID)\n\tif err != nil {\n\t\tstatus = 
\"error\"\n\t\trunError = fmt.Sprintf(\"failed to fetch saved query: %v\", err)\n\t\treturn\n\t}\n\tif savedQuery == nil {\n\t\tstatus = \"error\"\n\t\trunError = \"saved query not found\"\n\t\treturn\n\t}\n\n\t// Determine connection ID\n\tif schedule.ConnectionID != nil && *schedule.ConnectionID != \"\" {\n\t\tconnectionID = *schedule.ConnectionID\n\t} else if savedQuery.ConnectionID != nil && *savedQuery.ConnectionID != \"\" {\n\t\tconnectionID = *savedQuery.ConnectionID\n\t}\n\n\tif connectionID == \"\" {\n\t\tstatus = \"error\"\n\t\trunError = \"no connection ID configured for schedule or saved query\"\n\t\treturn\n\t}\n\n\t// Check that the tunnel is online\n\tif !r.gateway.IsTunnelOnline(connectionID) {\n\t\tstatus = \"error\"\n\t\trunError = \"tunnel not connected\"\n\t\treturn\n\t}\n\n\t// Find credentials from an active session for this connection\n\tuser, password, credErr := r.findCredentials(connectionID)\n\tif credErr != nil {\n\t\tstatus = \"error\"\n\t\trunError = fmt.Sprintf(\"no credentials available: %v\", credErr)\n\t\treturn\n\t}\n\n\t// Execute the query\n\ttimeout := time.Duration(schedule.TimeoutMs) * time.Millisecond\n\tif timeout <= 0 {\n\t\ttimeout = 60 * time.Second\n\t}\n\n\tresult, execErr := r.gateway.ExecuteQuery(connectionID, savedQuery.Query, user, password, timeout)\n\tif execErr != nil {\n\t\tstatus = \"error\"\n\t\trunError = execErr.Error()\n\t\treturn\n\t}\n\n\trowCount = countRows(result)\n}\n\nfunc nullableConnectionID(connectionID string) *string {\n\tif connectionID == \"\" {\n\t\treturn nil\n\t}\n\tid := connectionID\n\treturn &id\n}\n\nfunc maxInt(a, b int) int {\n\tif a > b {\n\t\treturn a\n\t}\n\treturn b\n}\n\n// findCredentials looks for active session credentials for a connection.\nfunc (r *Runner) findCredentials(connectionID string) (string, string, error) {\n\tsessions, err := r.db.GetActiveSessionsByConnection(connectionID, 3)\n\tif err != nil {\n\t\treturn \"\", \"\", fmt.Errorf(\"failed to load sessions: %w\", 
err)\n\t}\n\n\tfor _, s := range sessions {\n\t\tpassword, err := crypto.Decrypt(s.EncryptedPassword, r.secret)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\treturn s.ClickhouseUser, password, nil\n\t}\n\n\treturn \"\", \"\", fmt.Errorf(\"no active sessions with valid credentials for connection %s\", connectionID)\n}\n\n// countRows counts rows in a query result.\nfunc countRows(result *tunnel.QueryResult) int {\n\tif result == nil || len(result.Data) == 0 {\n\t\treturn 0\n\t}\n\tvar rows []json.RawMessage\n\tif err := json.Unmarshal(result.Data, &rows); err != nil {\n\t\treturn 0\n\t}\n\treturn len(rows)\n}\n"
  },
  {
    "path": "internal/server/handlers/admin.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/governance\"\n\t\"github.com/caioricciuti/ch-ui/internal/langfuse\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// AdminHandler handles admin-only routes for ClickHouse management.\n// All routes require the admin role, enforced by middleware.RequireAdmin.\ntype AdminHandler struct {\n\tDB        *database.DB\n\tGateway   *tunnel.Gateway\n\tConfig    *config.Config\n\tLangfuse  *langfuse.Client\n\tGovSyncer *governance.Syncer\n}\n\n// Routes registers all admin routes on the given chi.Router.\nfunc (h *AdminHandler) Routes(r chi.Router) {\n\tr.Use(middleware.RequireAdmin(h.DB))\n\n\tr.Get(\"/users\", h.GetUsers)\n\tr.Get(\"/user-roles\", h.GetUserRoles)\n\tr.Put(\"/user-roles/{username}\", h.SetUserRole)\n\tr.Delete(\"/user-roles/{username}\", h.DeleteUserRole)\n\tr.Get(\"/connections\", h.GetConnections)\n\tr.Get(\"/stats\", h.GetStats)\n\tr.Get(\"/clickhouse-users\", h.GetClickHouseUsers)\n\tr.Post(\"/clickhouse-users\", h.CreateClickHouseUser)\n\tr.Put(\"/clickhouse-users/{username}/password\", h.UpdateClickHouseUserPassword)\n\tr.Delete(\"/clickhouse-users/{username}\", h.DeleteClickHouseUser)\n\n\t// Brain admin management\n\tr.Get(\"/brain/providers\", h.ListBrainProviders)\n\tr.Post(\"/brain/providers\", h.CreateBrainProvider)\n\tr.Put(\"/brain/providers/{id}\", h.UpdateBrainProvider)\n\tr.Delete(\"/brain/providers/{id}\", h.DeleteBrainProvider)\n\tr.Post(\"/brain/providers/{id}/sync-models\", h.SyncBrainProviderModels)\n\tr.Get(\"/brain/models\", h.ListBrainModels)\n\tr.Put(\"/brain/models/{id}\", 
h.UpdateBrainModel)\n\tr.Post(\"/brain/models/bulk\", h.BulkUpdateBrainModels)\n\tr.Get(\"/brain/skills\", h.ListBrainSkills)\n\tr.Post(\"/brain/skills\", h.CreateBrainSkill)\n\tr.Put(\"/brain/skills/{id}\", h.UpdateBrainSkill)\n\n\t// Langfuse observability\n\tr.Get(\"/langfuse\", h.GetLangfuseConfig)\n\tr.Put(\"/langfuse\", h.UpdateLangfuseConfig)\n\tr.Delete(\"/langfuse\", h.DeleteLangfuseConfig)\n\tr.Post(\"/langfuse/test\", h.TestLangfuseConnection)\n\n\t// Governance feature toggle (Pro; stays admin-only, not Pro-gated at this\n\t// level so admins can inspect/disable the toggle even when the license\n\t// lapses — the syncer itself is Pro-gated at startup).\n\tr.Get(\"/governance/settings\", h.GetGovernanceSettings)\n\tr.Put(\"/governance/settings\", h.UpdateGovernanceSettings)\n}\n\n// ---------- GET /users ----------\n\nfunc (h *AdminHandler) GetUsers(w http.ResponseWriter, r *http.Request) {\n\tusers, err := h.DB.GetUsers()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get users\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve users\"})\n\t\treturn\n\t}\n\n\troleOverrides, err := h.DB.GetAllUserRoles()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get user role overrides\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve user role overrides\"})\n\t\treturn\n\t}\n\n\tuserMap := make(map[string]database.SessionUser, len(users)+len(roleOverrides))\n\tfor _, u := range users {\n\t\tuserMap[u.Username] = u\n\t}\n\tfor _, ov := range roleOverrides {\n\t\tif _, exists := userMap[ov.Username]; exists {\n\t\t\tcontinue\n\t\t}\n\t\tuserMap[ov.Username] = database.SessionUser{\n\t\t\tUsername:     ov.Username,\n\t\t\tUserRole:     ov.Role,\n\t\t\tLastLogin:    \"\",\n\t\t\tSessionCount: 0,\n\t\t}\n\t}\n\n\tappUsers := make([]database.SessionUser, 0, len(userMap))\n\tfor _, u := range userMap {\n\t\tappUsers = append(appUsers, 
u)\n\t}\n\n\tincludeStale := false\n\tswitch strings.ToLower(strings.TrimSpace(r.URL.Query().Get(\"include_stale\"))) {\n\tcase \"1\", \"true\", \"yes\":\n\t\tincludeStale = true\n\t}\n\n\texistsMap, err := h.fetchCurrentClickHouseUsers(r)\n\tif err != nil {\n\t\tslog.Warn(\"Admin users: failed to compare with current ClickHouse users\", \"error\", err)\n\t\t// Fallback to app-local users only if ClickHouse comparison fails.\n\t\tif appUsers == nil {\n\t\t\tappUsers = []database.SessionUser{}\n\t\t}\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"users\": appUsers,\n\t\t\t\"sync\":  map[string]bool{\"clickhouse_user_check\": false},\n\t\t})\n\t\treturn\n\t}\n\n\ttype responseUser struct {\n\t\tdatabase.SessionUser\n\t\tExistsInClickHouse bool `json:\"exists_in_clickhouse\"`\n\t}\n\n\tfiltered := make([]responseUser, 0, len(appUsers))\n\tfor _, u := range appUsers {\n\t\texists := existsMap[u.Username]\n\t\tif !includeStale && !exists {\n\t\t\tcontinue\n\t\t}\n\t\tfiltered = append(filtered, responseUser{\n\t\t\tSessionUser:        u,\n\t\t\tExistsInClickHouse: exists,\n\t\t})\n\t}\n\n\tif filtered == nil {\n\t\tfiltered = []responseUser{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"users\": filtered,\n\t\t\"sync\": map[string]bool{\n\t\t\t\"clickhouse_user_check\": true,\n\t\t},\n\t})\n}\n\nfunc (h *AdminHandler) fetchCurrentClickHouseUsers(r *http.Request) (map[string]bool, error) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\treturn nil, fmt.Errorf(\"not authenticated\")\n\t}\n\tif !h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\treturn nil, fmt.Errorf(\"tunnel offline\")\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"decrypt credentials: %w\", err)\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"SELECT name FROM system.users ORDER BY name FORMAT 
JSON\",\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t20*time.Second,\n\t)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar rows []map[string]interface{}\n\tif err := json.Unmarshal(result.Data, &rows); err != nil {\n\t\treturn nil, fmt.Errorf(\"parse system.users result: %w\", err)\n\t}\n\n\texists := make(map[string]bool, len(rows))\n\tfor _, row := range rows {\n\t\tif name, ok := row[\"name\"].(string); ok && strings.TrimSpace(name) != \"\" {\n\t\t\texists[name] = true\n\t\t}\n\t}\n\treturn exists, nil\n}\n\n// ---------- GET /user-roles ----------\n\nfunc (h *AdminHandler) GetUserRoles(w http.ResponseWriter, r *http.Request) {\n\troles, err := h.DB.GetAllUserRoles()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get user roles\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve user roles\"})\n\t\treturn\n\t}\n\n\tif roles == nil {\n\t\troles = []database.UserRole{}\n\t}\n\n\twriteJSON(w, http.StatusOK, roles)\n}\n\n// ---------- PUT /user-roles/{username} ----------\n\nfunc (h *AdminHandler) SetUserRole(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tusername := chi.URLParam(r, \"username\")\n\tif username == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Username is required\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tRole string `json:\"role\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\tbody.Role = strings.ToLower(strings.TrimSpace(body.Role))\n\n\tvalidRoles := map[string]bool{\"admin\": true, \"analyst\": true, \"viewer\": true}\n\tif !validRoles[body.Role] {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Role must be one of: admin, analyst, viewer\"})\n\t\treturn\n\t}\n\n\tisTargetAdmin, err := h.DB.IsUserRole(username, \"admin\")\n\tif err 
!= nil {\n\t\tslog.Error(\"Failed checking current role assignment\", \"error\", err, \"user\", username)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to validate current role\"})\n\t\treturn\n\t}\n\tif isTargetAdmin && body.Role != \"admin\" {\n\t\tadminCount, err := h.DB.CountUsersWithRole(\"admin\")\n\t\tif err != nil {\n\t\t\tslog.Error(\"Failed counting admins\", \"error\", err)\n\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to validate admin safety rule\"})\n\t\t\treturn\n\t\t}\n\t\tif adminCount <= 1 {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\n\t\t\t\t\"error\": \"Cannot remove the last admin. Assign another admin first.\",\n\t\t\t})\n\t\t\treturn\n\t\t}\n\t}\n\n\tif err := h.DB.SetUserRole(username, body.Role); err != nil {\n\t\tslog.Error(\"Failed to set user role\", \"error\", err, \"user\", username)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to set user role\"})\n\t\treturn\n\t}\n\tif err := h.DB.SetSessionsUserRole(username, body.Role); err != nil {\n\t\tslog.Warn(\"Failed to refresh active session roles after role update\", \"error\", err, \"user\", username)\n\t}\n\n\tvar actorName *string\n\tif session != nil {\n\t\tactorName = strPtr(session.ClickhouseUser)\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"user_role.set\",\n\t\tUsername:  actorName,\n\t\tDetails:   strPtr(fmt.Sprintf(\"Set role for %q to %s\", username, body.Role)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]string{\n\t\t\"message\":  \"User role updated\",\n\t\t\"username\": username,\n\t\t\"role\":     body.Role,\n\t})\n}\n\n// ---------- DELETE /user-roles/{username} ----------\n\nfunc (h *AdminHandler) DeleteUserRole(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tusername := chi.URLParam(r, \"username\")\n\tif username == 
\"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Username is required\"})\n\t\treturn\n\t}\n\n\tisTargetAdmin, err := h.DB.IsUserRole(username, \"admin\")\n\tif err != nil {\n\t\tslog.Error(\"Failed checking current role assignment\", \"error\", err, \"user\", username)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to validate current role\"})\n\t\treturn\n\t}\n\tif isTargetAdmin {\n\t\tadminCount, err := h.DB.CountUsersWithRole(\"admin\")\n\t\tif err != nil {\n\t\t\tslog.Error(\"Failed counting admins\", \"error\", err)\n\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to validate admin safety rule\"})\n\t\t\treturn\n\t\t}\n\t\tif adminCount <= 1 {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\n\t\t\t\t\"error\": \"Cannot remove the last admin. Assign another admin first.\",\n\t\t\t})\n\t\t\treturn\n\t\t}\n\t}\n\n\tif err := h.DB.DeleteUserRole(username); err != nil {\n\t\tslog.Error(\"Failed to delete user role\", \"error\", err, \"user\", username)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete user role\"})\n\t\treturn\n\t}\n\tif err := h.DB.SetSessionsUserRole(username, \"viewer\"); err != nil {\n\t\tslog.Warn(\"Failed to refresh active session roles after role override removal\", \"error\", err, \"user\", username)\n\t}\n\n\tvar actorName *string\n\tif session != nil {\n\t\tactorName = strPtr(session.ClickhouseUser)\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"user_role.deleted\",\n\t\tUsername:  actorName,\n\t\tDetails:   strPtr(fmt.Sprintf(\"Removed role override for %q\", username)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]string{\n\t\t\"message\":  \"User role override removed\",\n\t\t\"username\": username,\n\t})\n}\n\n// ---------- GET /connections ----------\n\nfunc (h *AdminHandler) GetConnections(w 
http.ResponseWriter, r *http.Request) {\n\tconns, err := h.DB.GetConnections()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list connections\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connections\"})\n\t\treturn\n\t}\n\n\ttype connInfo struct {\n\t\tID        string  `json:\"id\"`\n\t\tName      string  `json:\"name\"`\n\t\tStatus    string  `json:\"status\"`\n\t\tOnline    bool    `json:\"online\"`\n\t\tCreatedAt string  `json:\"created_at\"`\n\t\tLastSeen  *string `json:\"last_seen_at,omitempty\"`\n\t}\n\n\tresults := make([]connInfo, 0, len(conns))\n\tfor _, c := range conns {\n\t\tresults = append(results, connInfo{\n\t\t\tID:        c.ID,\n\t\t\tName:      c.Name,\n\t\t\tStatus:    c.Status,\n\t\t\tOnline:    h.Gateway.IsTunnelOnline(c.ID),\n\t\t\tCreatedAt: c.CreatedAt,\n\t\t\tLastSeen:  c.LastSeenAt,\n\t\t})\n\t}\n\n\twriteJSON(w, http.StatusOK, results)\n}\n\n// ---------- GET /stats ----------\n\nfunc (h *AdminHandler) GetStats(w http.ResponseWriter, r *http.Request) {\n\tusers, err := h.DB.GetUsers()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get users for stats\", \"error\", err)\n\t\tusers = []database.SessionUser{}\n\t}\n\n\tconns, err := h.DB.GetConnections()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connections for stats\", \"error\", err)\n\t\tconns = []database.Connection{}\n\t}\n\n\tonlineCount := 0\n\tfor _, c := range conns {\n\t\tif h.Gateway.IsTunnelOnline(c.ID) {\n\t\t\tonlineCount++\n\t\t}\n\t}\n\n\tauditLogs, err := h.DB.GetAuditLogs(1000)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get audit logs for stats\", \"error\", err)\n\t\tauditLogs = []database.AuditLog{}\n\t}\n\n\tloginCount := 0\n\tqueryCount := 0\n\tfor _, log := range auditLogs {\n\t\tswitch log.Action {\n\t\tcase \"user.login\":\n\t\t\tloginCount++\n\t\tcase \"query.execute\":\n\t\t\tqueryCount++\n\t\t}\n\t}\n\n\twriteJSON(w, http.StatusOK, 
map[string]interface{}{\n\t\t\"users_count\": len(users),\n\t\t\"connections\": len(conns),\n\t\t\"online\":      onlineCount,\n\t\t\"login_count\": loginCount,\n\t\t\"query_count\": queryCount,\n\t})\n}\n\n// ---------- GET /clickhouse-users ----------\n\nfunc (h *AdminHandler) GetClickHouseUsers(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tif !h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\"error\": \"Tunnel is offline\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\tquery := \"SELECT name, storage, auth_type, host_ip, host_names, default_roles_all, default_roles_list, default_roles_except FROM system.users ORDER BY name FORMAT JSON\"\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tquery,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t30*time.Second,\n\t)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to query system.users\", \"error\", err, \"connection\", session.ConnectionID)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"data\": result.Data,\n\t\t\"meta\": result.Meta,\n\t})\n}\n\n// ---------- POST /clickhouse-users ----------\n\nfunc (h *AdminHandler) CreateClickHouseUser(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tif 
!h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\"error\": \"Tunnel is offline\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         string   `json:\"name\"`\n\t\tPassword     string   `json:\"password\"`\n\t\tAuthType     string   `json:\"auth_type\"`\n\t\tDefaultRoles []string `json:\"default_roles\"`\n\t\tIfNotExists  *bool    `json:\"if_not_exists\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\n\tauthType := strings.TrimSpace(strings.ToLower(body.AuthType))\n\tif authType == \"\" {\n\t\tif strings.TrimSpace(body.Password) == \"\" {\n\t\t\tauthType = \"no_password\"\n\t\t} else {\n\t\t\tauthType = \"sha256_password\"\n\t\t}\n\t}\n\tswitch authType {\n\tcase \"no_password\", \"plaintext_password\", \"sha256_password\", \"double_sha1_password\":\n\tdefault:\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"auth_type must be one of: no_password, plaintext_password, sha256_password, double_sha1_password\"})\n\t\treturn\n\t}\n\tif authType != \"no_password\" && strings.TrimSpace(body.Password) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"password is required for selected auth_type\"})\n\t\treturn\n\t}\n\n\tallRoles, roleNames, parseErr := parseDefaultRolesInput(body.DefaultRoles)\n\tif parseErr != nil {\n\t\twriteJSON(w, 
http.StatusBadRequest, map[string]string{\"error\": parseErr.Error()})\n\t\treturn\n\t}\n\n\tvar createSQL strings.Builder\n\tcreateSQL.WriteString(\"CREATE USER \")\n\tif body.IfNotExists == nil || *body.IfNotExists {\n\t\tcreateSQL.WriteString(\"IF NOT EXISTS \")\n\t}\n\tcreateSQL.WriteString(escapeIdentifier(name))\n\tcreateSQL.WriteString(buildClickHouseCreateAuthClause(authType, body.Password))\n\tcreateSQLStr := createSQL.String()\n\texecutedCommands := []string{createSQLStr}\n\n\tif _, err := h.Gateway.ExecuteQuery(session.ConnectionID, createSQLStr, session.ClickhouseUser, password, 30*time.Second); err != nil {\n\t\tslog.Warn(\"Failed to create ClickHouse user\", \"error\", err, \"connection\", session.ConnectionID, \"name\", name)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"%s\\n\\nCommand:\\n%s\", err.Error(), createSQLStr)})\n\t\treturn\n\t}\n\n\tescapedRoles := make([]string, 0, len(roleNames))\n\tfor _, role := range roleNames {\n\t\tescapedRoles = append(escapedRoles, escapeIdentifier(role))\n\t}\n\n\t// Apply role membership/default role as follow-up statements for broad ClickHouse compatibility.\n\tif len(escapedRoles) > 0 {\n\t\tgrantSQL := \"GRANT \" + strings.Join(escapedRoles, \", \") + \" TO \" + escapeIdentifier(name)\n\t\texecutedCommands = append(executedCommands, grantSQL)\n\t\tif _, err := h.Gateway.ExecuteQuery(session.ConnectionID, grantSQL, session.ClickhouseUser, password, 30*time.Second); err != nil {\n\t\t\tslog.Warn(\"ClickHouse user created but role grant failed\", \"error\", err, \"connection\", session.ConnectionID, \"name\", name)\n\t\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"user created but failed to grant roles: %v\\n\\nCommand:\\n%s\", err, grantSQL)})\n\t\t\treturn\n\t\t}\n\t}\n\n\tif allRoles || len(escapedRoles) > 0 {\n\t\tdefaultRoleClause := \"ALL\"\n\t\tif !allRoles {\n\t\t\tdefaultRoleClause = strings.Join(escapedRoles, \", 
\")\n\t\t}\n\t\talterSQL := \"ALTER USER \" + escapeIdentifier(name) + \" DEFAULT ROLE \" + defaultRoleClause\n\t\texecutedCommands = append(executedCommands, alterSQL)\n\t\tif _, err := h.Gateway.ExecuteQuery(session.ConnectionID, alterSQL, session.ClickhouseUser, password, 30*time.Second); err != nil {\n\t\t\tslog.Warn(\"ClickHouse user created but default role assignment failed\", \"error\", err, \"connection\", session.ConnectionID, \"name\", name)\n\t\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"user created but failed to set default role: %v\\n\\nCommand:\\n%s\", err, alterSQL)})\n\t\t\treturn\n\t\t}\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"admin.clickhouse_user.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"name=%s auth_type=%s\", name, authType)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"name\":     name,\n\t\t\"command\":  createSQLStr,\n\t\t\"commands\": executedCommands,\n\t})\n}\n\n// ---------- PUT /clickhouse-users/{username}/password ----------\n\nfunc (h *AdminHandler) UpdateClickHouseUserPassword(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tif !h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\"error\": \"Tunnel is offline\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt 
credentials\"})\n\t\treturn\n\t}\n\n\tusername := strings.TrimSpace(chi.URLParam(r, \"username\"))\n\tif username == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"username is required\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tPassword string `json:\"password\"`\n\t\tAuthType string `json:\"auth_type\"`\n\t\tIfExists *bool  `json:\"if_exists\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tauthType := strings.TrimSpace(strings.ToLower(body.AuthType))\n\tif authType == \"\" {\n\t\tauthType = \"sha256_password\"\n\t}\n\tswitch authType {\n\tcase \"no_password\", \"plaintext_password\", \"sha256_password\", \"double_sha1_password\":\n\tdefault:\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"auth_type must be one of: no_password, plaintext_password, sha256_password, double_sha1_password\"})\n\t\treturn\n\t}\n\tif authType != \"no_password\" && strings.TrimSpace(body.Password) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"password is required for selected auth_type\"})\n\t\treturn\n\t}\n\n\tifExists := body.IfExists == nil || *body.IfExists\n\tvar b strings.Builder\n\tb.WriteString(\"ALTER USER \")\n\tif ifExists {\n\t\tb.WriteString(\"IF EXISTS \")\n\t}\n\tb.WriteString(escapeIdentifier(username))\n\tb.WriteString(buildClickHouseAlterAuthClause(authType, body.Password))\n\n\tif _, err := h.Gateway.ExecuteQuery(session.ConnectionID, b.String(), session.ClickhouseUser, password, 30*time.Second); err != nil {\n\t\tslog.Warn(\"Failed to update ClickHouse user password\", \"error\", err, \"connection\", session.ConnectionID, \"name\", username)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"%s\\n\\nCommand:\\n%s\", err.Error(), 
b.String())})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"admin.clickhouse_user.password_changed\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"name=%s auth_type=%s\", username, authType)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"command\": b.String(),\n\t})\n}\n\nfunc buildClickHouseCreateAuthClause(authType, password string) string {\n\tswitch authType {\n\tcase \"no_password\":\n\t\treturn \"\"\n\tcase \"plaintext_password\":\n\t\treturn \" IDENTIFIED BY '\" + escapeLiteral(password) + \"'\"\n\tdefault:\n\t\treturn \" IDENTIFIED WITH \" + authType + \" BY '\" + escapeLiteral(password) + \"'\"\n\t}\n}\n\nfunc buildClickHouseAlterAuthClause(authType, password string) string {\n\tswitch authType {\n\tcase \"no_password\":\n\t\treturn \" IDENTIFIED WITH no_password\"\n\tcase \"plaintext_password\":\n\t\treturn \" IDENTIFIED BY '\" + escapeLiteral(password) + \"'\"\n\tdefault:\n\t\treturn \" IDENTIFIED WITH \" + authType + \" BY '\" + escapeLiteral(password) + \"'\"\n\t}\n}\n\nfunc parseDefaultRolesInput(input []string) (all bool, roles []string, err error) {\n\tseen := make(map[string]struct{}, len(input))\n\tfor _, raw := range input {\n\t\trole := strings.TrimSpace(raw)\n\t\tif role == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif strings.EqualFold(role, \"ALL\") {\n\t\t\tall = true\n\t\t\tcontinue\n\t\t}\n\t\tkey := strings.ToLower(role)\n\t\tif _, exists := seen[key]; exists {\n\t\t\tcontinue\n\t\t}\n\t\tseen[key] = struct{}{}\n\t\troles = append(roles, role)\n\t}\n\tif all && len(roles) > 0 {\n\t\treturn false, nil, fmt.Errorf(\"default_roles cannot mix ALL with named roles\")\n\t}\n\treturn all, roles, nil\n}\n\n// ---------- DELETE /clickhouse-users/{username} ----------\n\nfunc (h *AdminHandler) DeleteClickHouseUser(w 
http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tif !h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\"error\": \"Tunnel is offline\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\tusername := strings.TrimSpace(chi.URLParam(r, \"username\"))\n\tif username == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"username is required\"})\n\t\treturn\n\t}\n\tif username == session.ClickhouseUser {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"cannot delete current session user\"})\n\t\treturn\n\t}\n\n\tifExists := true\n\tif raw := strings.ToLower(strings.TrimSpace(r.URL.Query().Get(\"if_exists\"))); raw == \"false\" || raw == \"0\" {\n\t\tifExists = false\n\t}\n\n\tsql := \"DROP USER \"\n\tif ifExists {\n\t\tsql += \"IF EXISTS \"\n\t}\n\tsql += escapeIdentifier(username)\n\n\tif _, err := h.Gateway.ExecuteQuery(session.ConnectionID, sql, session.ClickhouseUser, password, 30*time.Second); err != nil {\n\t\tslog.Warn(\"Failed to delete ClickHouse user\", \"error\", err, \"connection\", session.ConnectionID, \"name\", username)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"%s\\n\\nCommand:\\n%s\", err.Error(), sql)})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"admin.clickhouse_user.deleted\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      
strPtr(fmt.Sprintf(\"name=%s\", username)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"command\": sql,\n\t})\n}\n\n// ---------- Helpers ----------\n\nfunc escapeString(s string) string {\n\ts = strings.ReplaceAll(s, `\\`, `\\\\`)\n\ts = strings.ReplaceAll(s, `'`, `''`)\n\ts = strings.ReplaceAll(s, `%`, `\\%`)\n\ts = strings.ReplaceAll(s, `_`, `\\_`)\n\treturn s\n}\n\n"
  },
  {
    "path": "internal/server/handlers/admin_brain.go",
    "content": "package handlers\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"sort\"\n\t\"strings\"\n\t\"time\"\n\n\tbraincore \"github.com/caioricciuti/ch-ui/internal/brain\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/go-chi/chi/v5\"\n)\n\nfunc normalizeProviderKind(kind string) (string, bool) {\n\tswitch strings.ToLower(strings.TrimSpace(kind)) {\n\tcase \"openai\":\n\t\treturn \"openai\", true\n\tcase \"openai_compatible\", \"openai-compatible\":\n\t\treturn \"openai_compatible\", true\n\tcase \"ollama\":\n\t\treturn \"ollama\", true\n\tdefault:\n\t\treturn \"\", false\n\t}\n}\n\nfunc modelDisplayName(m database.BrainModel) string {\n\tif m.DisplayName != nil && strings.TrimSpace(*m.DisplayName) != \"\" {\n\t\treturn strings.TrimSpace(*m.DisplayName)\n\t}\n\treturn m.Name\n}\n\nfunc scoreRecommendedModel(name string) int {\n\tn := strings.ToLower(strings.TrimSpace(name))\n\tswitch {\n\tcase strings.Contains(n, \"gpt-5\"):\n\t\treturn 100\n\tcase strings.Contains(n, \"gpt-4.1\"):\n\t\treturn 95\n\tcase strings.Contains(n, \"gpt-4o\"):\n\t\treturn 90\n\tcase strings.Contains(n, \"gpt-4\"):\n\t\treturn 80\n\tcase strings.Contains(n, \"o3\"), strings.Contains(n, \"o1\"):\n\t\treturn 70\n\tcase strings.Contains(n, \"claude\"):\n\t\treturn 60\n\tcase strings.Contains(n, \"llama\"), strings.Contains(n, \"qwen\"), strings.Contains(n, \"mistral\"), strings.Contains(n, \"gemma\"):\n\t\treturn 50\n\tdefault:\n\t\treturn 10\n\t}\n}\n\nfunc pickRecommendedModel(models []database.BrainModel) *database.BrainModel {\n\tif len(models) == 0 {\n\t\treturn nil\n\t}\n\tordered := make([]database.BrainModel, 0, len(models))\n\tordered = append(ordered, models...)\n\tsort.SliceStable(ordered, func(i, j int) bool {\n\t\ta := ordered[i]\n\t\tb := ordered[j]\n\t\tsa := 
scoreRecommendedModel(a.Name)\n\t\tsb := scoreRecommendedModel(b.Name)\n\t\tif sa != sb {\n\t\t\treturn sa > sb\n\t\t}\n\t\treturn strings.ToLower(a.Name) < strings.ToLower(b.Name)\n\t})\n\treturn &ordered[0]\n}\n\nfunc applyModelBulkAction(db *database.DB, providerID, action string) (int, error) {\n\tmodels, err := db.GetBrainModels(providerID)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tif len(models) == 0 {\n\t\treturn 0, nil\n\t}\n\n\tswitch action {\n\tcase \"deactivate_all\":\n\t\tif err := db.ClearDefaultBrainModelsByProvider(providerID); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tfor _, m := range models {\n\t\t\tif err := db.SetBrainModelActive(m.ID, false); err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t}\n\t\treturn len(models), nil\n\tcase \"activate_all\":\n\t\tvar defaultModelID string\n\t\tfor _, m := range models {\n\t\t\tif m.IsDefault {\n\t\t\t\tdefaultModelID = m.ID\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif defaultModelID == \"\" {\n\t\t\trec := pickRecommendedModel(models)\n\t\t\tif rec != nil {\n\t\t\t\tdefaultModelID = rec.ID\n\t\t\t}\n\t\t}\n\t\tfor _, m := range models {\n\t\t\tdisplay := modelDisplayName(m)\n\t\t\tisDefault := m.ID == defaultModelID\n\t\t\tif err := db.UpdateBrainModel(m.ID, display, true, isDefault); err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t}\n\t\treturn len(models), nil\n\tcase \"activate_recommended\":\n\t\trec := pickRecommendedModel(models)\n\t\tif rec == nil {\n\t\t\treturn 0, nil\n\t\t}\n\t\tif err := db.ClearDefaultBrainModelsByProvider(providerID); err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\tfor _, m := range models {\n\t\t\tdisplay := modelDisplayName(m)\n\t\t\tisRec := m.ID == rec.ID\n\t\t\tif err := db.UpdateBrainModel(m.ID, display, isRec, isRec); err != nil {\n\t\t\t\treturn 0, err\n\t\t\t}\n\t\t}\n\t\treturn len(models), nil\n\tdefault:\n\t\treturn 0, fmt.Errorf(\"unsupported action: %s\", action)\n\t}\n}\n\nfunc (h *AdminHandler) ListBrainProviders(w http.ResponseWriter, r *http.Request) 
{\n\tproviders, err := h.DB.GetBrainProviders()\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list providers\"})\n\t\treturn\n\t}\n\tif providers == nil {\n\t\tproviders = []database.BrainProvider{}\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"providers\": providers})\n}\n\nfunc (h *AdminHandler) CreateBrainProvider(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tvar body struct {\n\t\tName      string `json:\"name\"`\n\t\tKind      string `json:\"kind\"`\n\t\tBaseURL   string `json:\"baseUrl\"`\n\t\tAPIKey    string `json:\"apiKey\"`\n\t\tIsActive  *bool  `json:\"isActive\"`\n\t\tIsDefault *bool  `json:\"isDefault\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Provider name is required\"})\n\t\treturn\n\t}\n\tkind, ok := normalizeProviderKind(body.Kind)\n\tif !ok {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Provider kind must be openai, openai_compatible, or ollama\"})\n\t\treturn\n\t}\n\n\tvar encryptedKey *string\n\tif strings.TrimSpace(body.APIKey) != \"\" {\n\t\tencrypted, err := crypto.Encrypt(strings.TrimSpace(body.APIKey), h.Config.AppSecretKey)\n\t\tif err != nil {\n\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to encrypt API key\"})\n\t\t\treturn\n\t\t}\n\t\tencryptedKey = &encrypted\n\t}\n\n\tisActive := true\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\tisDefault := false\n\tif body.IsDefault != nil {\n\t\tisDefault = *body.IsDefault\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\tproviderID, err := 
h.DB.CreateBrainProvider(name, kind, strings.TrimSpace(body.BaseURL), encryptedKey, isActive, isDefault, actor)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create provider\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.provider.created\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"provider=%s kind=%s\", name, kind)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"success\": true, \"id\": providerID})\n}\n\nfunc (h *AdminHandler) UpdateBrainProvider(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tproviderID := chi.URLParam(r, \"id\")\n\tif strings.TrimSpace(providerID) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Provider ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetBrainProviderByID(providerID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load provider\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Provider not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName      *string `json:\"name\"`\n\t\tKind      *string `json:\"kind\"`\n\t\tBaseURL   *string `json:\"baseUrl\"`\n\t\tAPIKey    *string `json:\"apiKey\"`\n\t\tIsActive  *bool   `json:\"isActive\"`\n\t\tIsDefault *bool   `json:\"isDefault\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tif body.Name != nil && strings.TrimSpace(*body.Name) != \"\" {\n\t\tname = strings.TrimSpace(*body.Name)\n\t}\n\tkind := existing.Kind\n\tif body.Kind != nil {\n\t\tn, ok := normalizeProviderKind(*body.Kind)\n\t\tif !ok {\n\t\t\twriteJSON(w, 
http.StatusBadRequest, map[string]string{\"error\": \"Provider kind must be openai, openai_compatible, or ollama\"})\n\t\t\treturn\n\t\t}\n\t\tkind = n\n\t}\n\tbaseURL := \"\"\n\tif existing.BaseURL != nil {\n\t\tbaseURL = *existing.BaseURL\n\t}\n\tif body.BaseURL != nil {\n\t\tbaseURL = strings.TrimSpace(*body.BaseURL)\n\t}\n\tisActive := existing.IsActive\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\tisDefault := existing.IsDefault\n\tif body.IsDefault != nil {\n\t\tisDefault = *body.IsDefault\n\t}\n\n\tupdateAPIKey := false\n\tvar encryptedKey *string\n\tif body.APIKey != nil {\n\t\tupdateAPIKey = true\n\t\tif strings.TrimSpace(*body.APIKey) != \"\" {\n\t\t\tencrypted, encErr := crypto.Encrypt(strings.TrimSpace(*body.APIKey), h.Config.AppSecretKey)\n\t\t\tif encErr != nil {\n\t\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to encrypt API key\"})\n\t\t\t\treturn\n\t\t\t}\n\t\t\tencryptedKey = &encrypted\n\t\t}\n\t}\n\n\tif err := h.DB.UpdateBrainProvider(providerID, name, kind, baseURL, encryptedKey, updateAPIKey, isActive, isDefault); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update provider\"})\n\t\treturn\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.provider.updated\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"provider_id=%s\", providerID)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *AdminHandler) DeleteBrainProvider(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tproviderID := chi.URLParam(r, \"id\")\n\tif strings.TrimSpace(providerID) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Provider ID is required\"})\n\t\treturn\n\t}\n\n\tif err 
:= h.DB.DeleteBrainProvider(providerID); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete provider\"})\n\t\treturn\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.provider.deleted\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"provider_id=%s\", providerID)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *AdminHandler) SyncBrainProviderModels(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tproviderID := chi.URLParam(r, \"id\")\n\tif strings.TrimSpace(providerID) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Provider ID is required\"})\n\t\treturn\n\t}\n\n\tprovider, err := h.DB.GetBrainProviderByID(providerID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load provider\"})\n\t\treturn\n\t}\n\tif provider == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Provider not found\"})\n\t\treturn\n\t}\n\n\tadapter, err := braincore.NewProvider(provider.Kind)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tcfg := braincore.ProviderConfig{Kind: provider.Kind}\n\tif provider.BaseURL != nil {\n\t\tcfg.BaseURL = *provider.BaseURL\n\t}\n\tif provider.EncryptedAPIKey != nil {\n\t\tdecrypted, decErr := crypto.Decrypt(*provider.EncryptedAPIKey, h.Config.AppSecretKey)\n\t\tif decErr != nil {\n\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt provider API key\"})\n\t\t\treturn\n\t\t}\n\t\tcfg.APIKey = decrypted\n\t}\n\n\tctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)\n\tdefer cancel()\n\n\tmodelNames, 
err := adapter.ListModels(ctx, cfg)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\tif len(modelNames) == 0 {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"models\": []database.BrainModel{}})\n\t\treturn\n\t}\n\n\tvar firstID string\n\tfor _, name := range modelNames {\n\t\tid, ensureErr := h.DB.EnsureBrainModel(providerID, name, name)\n\t\tif ensureErr != nil {\n\t\t\tslog.Warn(\"Failed to sync model\", \"provider\", providerID, \"model\", name, \"error\", ensureErr)\n\t\t\tcontinue\n\t\t}\n\t\tif firstID == \"\" {\n\t\t\tfirstID = id\n\t\t}\n\t}\n\n\tmodels, _ := h.DB.GetBrainModels(providerID)\n\thasDefault := false\n\thasActive := false\n\tfor _, m := range models {\n\t\tif m.IsDefault {\n\t\t\thasDefault = true\n\t\t}\n\t\tif m.IsActive {\n\t\t\thasActive = true\n\t\t}\n\t\tif hasDefault && hasActive {\n\t\t\tbreak\n\t\t}\n\t}\n\tif !hasDefault || !hasActive {\n\t\trec := pickRecommendedModel(models)\n\t\tif rec != nil {\n\t\t\t_ = h.DB.ClearDefaultBrainModelsByProvider(providerID)\n\t\t\t_ = h.DB.UpdateBrainModel(rec.ID, modelDisplayName(*rec), true, true)\n\t\t} else if firstID != \"\" {\n\t\t\t_ = h.DB.UpdateBrainModel(firstID, modelNames[0], true, true)\n\t\t}\n\t\tmodels, _ = h.DB.GetBrainModels(providerID)\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.provider.models_synced\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"provider_id=%s models=%d\", providerID, len(modelNames))),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"models\": models})\n}\n\nfunc (h *AdminHandler) ListBrainModels(w http.ResponseWriter, r *http.Request) {\n\tmodels, err := h.DB.GetBrainModelsWithProvider(false)\n\tif err != nil {\n\t\twriteJSON(w, 
http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list models\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"models\": models})\n}\n\nfunc (h *AdminHandler) UpdateBrainModel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tmodelID := chi.URLParam(r, \"id\")\n\tif strings.TrimSpace(modelID) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Model ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetBrainModelByID(modelID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load model\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Model not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tDisplayName *string `json:\"displayName\"`\n\t\tIsActive    *bool   `json:\"isActive\"`\n\t\tIsDefault   *bool   `json:\"isDefault\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tdisplayName := existing.Name\n\tif existing.DisplayName != nil && strings.TrimSpace(*existing.DisplayName) != \"\" {\n\t\tdisplayName = *existing.DisplayName\n\t}\n\tif body.DisplayName != nil {\n\t\tdisplayName = strings.TrimSpace(*body.DisplayName)\n\t}\n\tisActive := existing.IsActive\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\tisDefault := existing.IsDefault\n\tif body.IsDefault != nil {\n\t\tisDefault = *body.IsDefault\n\t}\n\tif isDefault {\n\t\tisActive = true\n\t}\n\tif !isActive {\n\t\tisDefault = false\n\t}\n\n\tif err := h.DB.UpdateBrainModel(modelID, displayName, isActive, isDefault); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update model\"})\n\t\treturn\n\t}\n\n\tactor := \"\"\n\tif session 
!= nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.model.updated\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"model_id=%s\", modelID)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *AdminHandler) BulkUpdateBrainModels(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tvar body struct {\n\t\tProviderID string `json:\"providerId\"`\n\t\tAction     string `json:\"action\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tproviderID := strings.TrimSpace(body.ProviderID)\n\tif providerID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"providerId is required\"})\n\t\treturn\n\t}\n\taction := strings.TrimSpace(body.Action)\n\tif action != \"deactivate_all\" && action != \"activate_all\" && action != \"activate_recommended\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"action must be one of: deactivate_all, activate_all, activate_recommended\"})\n\t\treturn\n\t}\n\n\tupdated, err := applyModelBulkAction(h.DB, providerID, action)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to apply bulk model action\"})\n\t\treturn\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.model.bulk_updated\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"provider_id=%s action=%s updated=%d\", providerID, action, updated)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"updated\": updated,\n\t})\n}\n\nfunc 
(h *AdminHandler) ListBrainSkills(w http.ResponseWriter, r *http.Request) {\n\tskills, err := h.DB.GetBrainSkills()\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list skills\"})\n\t\treturn\n\t}\n\tif skills == nil {\n\t\tskills = []database.BrainSkill{}\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"skills\": skills})\n}\n\nfunc (h *AdminHandler) CreateBrainSkill(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tvar body struct {\n\t\tName      string `json:\"name\"`\n\t\tContent   string `json:\"content\"`\n\t\tIsActive  *bool  `json:\"isActive\"`\n\t\tIsDefault *bool  `json:\"isDefault\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tcontent := strings.TrimSpace(body.Content)\n\tif name == \"\" || content == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name and content are required\"})\n\t\treturn\n\t}\n\n\tisActive := true\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\tisDefault := false\n\tif body.IsDefault != nil {\n\t\tisDefault = *body.IsDefault\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\tid, err := h.DB.CreateBrainSkill(name, content, actor, isActive, isDefault)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create skill\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.skill.created\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"skill=%s\", name)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"success\": true, \"id\": id})\n}\n\nfunc (h *AdminHandler) 
UpdateBrainSkill(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tskillID := chi.URLParam(r, \"id\")\n\tif strings.TrimSpace(skillID) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Skill ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetBrainSkillByID(skillID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load skill\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Skill not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName      *string `json:\"name\"`\n\t\tContent   *string `json:\"content\"`\n\t\tIsActive  *bool   `json:\"isActive\"`\n\t\tIsDefault *bool   `json:\"isDefault\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tif body.Name != nil && strings.TrimSpace(*body.Name) != \"\" {\n\t\tname = strings.TrimSpace(*body.Name)\n\t}\n\tcontent := existing.Content\n\tif body.Content != nil {\n\t\tcontent = strings.TrimSpace(*body.Content)\n\t}\n\tisActive := existing.IsActive\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\tisDefault := existing.IsDefault\n\tif body.IsDefault != nil {\n\t\tisDefault = *body.IsDefault\n\t}\n\n\tif strings.TrimSpace(name) == \"\" || strings.TrimSpace(content) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name and content are required\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdateBrainSkill(skillID, name, content, isActive, isDefault); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update skill\"})\n\t\treturn\n\t}\n\n\tactor := \"\"\n\tif session != nil {\n\t\tactor = 
session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"brain.skill.updated\",\n\t\tUsername:  strPtr(actor),\n\t\tDetails:   strPtr(fmt.Sprintf(\"skill_id=%s\", skillID)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n"
  },
  {
    "path": "internal/server/handlers/admin_governance.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n)\n\n// governanceSettingsResponse is the shape returned by the governance settings\n// endpoints so the frontend can render the toggle state + last-change metadata.\ntype governanceSettingsResponse struct {\n\tSyncEnabled      bool   `json:\"sync_enabled\"`\n\tUpdatedAt        string `json:\"updated_at\"`\n\tUpdatedBy        string `json:\"updated_by\"`\n\tBannerDismissed  bool   `json:\"banner_dismissed\"`\n\tSyncerRunning    bool   `json:\"syncer_running\"`\n}\n\n// GetGovernanceSettings returns the current governance sync toggle state.\nfunc (h *AdminHandler) GetGovernanceSettings(w http.ResponseWriter, r *http.Request) {\n\tresp := h.buildGovernanceSettingsResponse()\n\twriteJSON(w, http.StatusOK, resp)\n}\n\n// UpdateGovernanceSettings flips the governance sync toggle and, if the syncer\n// handle is wired in, starts or stops the background goroutine accordingly.\nfunc (h *AdminHandler) UpdateGovernanceSettings(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tvar body struct {\n\t\tSyncEnabled     *bool `json:\"sync_enabled\"`\n\t\tBannerDismissed *bool `json:\"banner_dismissed\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tactor := \"unknown\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\n\tif body.SyncEnabled != nil {\n\t\tif err := h.DB.SetGovernanceSyncEnabled(*body.SyncEnabled, actor); err != nil {\n\t\t\tslog.Error(\"Failed to persist governance sync setting\", \"error\", err)\n\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to save setting\")\n\t\t\treturn\n\t\t}\n\n\t\tif h.GovSyncer != nil {\n\t\t\tif *body.SyncEnabled 
{\n\t\t\t\th.GovSyncer.StartBackground()\n\t\t\t} else {\n\t\t\t\th.GovSyncer.Stop()\n\t\t\t}\n\t\t}\n\n\t\tdetails := fmt.Sprintf(`{\"sync_enabled\":%t}`, *body.SyncEnabled)\n\t\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\t\tAction:    \"governance.sync_toggle\",\n\t\t\tUsername:  strPtr(actor),\n\t\t\tDetails:   strPtr(details),\n\t\t\tIPAddress: strPtr(r.RemoteAddr),\n\t\t})\n\t}\n\n\tif body.BannerDismissed != nil && *body.BannerDismissed {\n\t\tif err := h.DB.SetSetting(database.SettingGovernanceUpgradeBanner, \"true\"); err != nil {\n\t\t\tslog.Warn(\"Failed to persist governance banner dismissal\", \"error\", err)\n\t\t}\n\t}\n\n\twriteJSON(w, http.StatusOK, h.buildGovernanceSettingsResponse())\n}\n\nfunc (h *AdminHandler) buildGovernanceSettingsResponse() governanceSettingsResponse {\n\tenabled := h.DB.GovernanceSyncEnabled()\n\tupdatedAt, _ := h.DB.GetSetting(database.SettingGovernanceSyncUpdatedAt)\n\tupdatedBy, _ := h.DB.GetSetting(database.SettingGovernanceSyncUpdatedBy)\n\tbannerDismissed, _ := h.DB.GetSetting(database.SettingGovernanceUpgradeBanner)\n\n\trunning := false\n\tif h.GovSyncer != nil {\n\t\trunning = h.GovSyncer.IsRunning()\n\t}\n\n\treturn governanceSettingsResponse{\n\t\tSyncEnabled:     enabled,\n\t\tUpdatedAt:       updatedAt,\n\t\tUpdatedBy:       updatedBy,\n\t\tBannerDismissed: bannerDismissed == \"true\",\n\t\tSyncerRunning:   running,\n\t}\n}\n"
  },
  {
    "path": "internal/server/handlers/admin_langfuse.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"net/http\"\n\t\"strings\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/langfuse\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n)\n\n// GetLangfuseConfig returns the current Langfuse configuration (secret key masked).\nfunc (h *AdminHandler) GetLangfuseConfig(w http.ResponseWriter, r *http.Request) {\n\tpublicKey, _ := h.DB.GetSetting(\"langfuse.public_key\")\n\tbaseURL, _ := h.DB.GetSetting(\"langfuse.base_url\")\n\tencryptedSecret, _ := h.DB.GetSetting(\"langfuse.secret_key\")\n\n\tif baseURL == \"\" {\n\t\tbaseURL = \"https://cloud.langfuse.com\"\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":        true,\n\t\t\"public_key\":     publicKey,\n\t\t\"base_url\":       baseURL,\n\t\t\"has_secret_key\": encryptedSecret != \"\",\n\t\t\"enabled\":        h.Langfuse.IsEnabled(),\n\t})\n}\n\n// UpdateLangfuseConfig saves Langfuse configuration and reconfigures the client.\nfunc (h *AdminHandler) UpdateLangfuseConfig(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\tvar body struct {\n\t\tPublicKey string `json:\"publicKey\"`\n\t\tSecretKey string `json:\"secretKey\"`\n\t\tBaseURL   string `json:\"baseUrl\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tpublicKey := strings.TrimSpace(body.PublicKey)\n\tsecretKey := strings.TrimSpace(body.SecretKey)\n\tbaseURL := strings.TrimSpace(body.BaseURL)\n\n\tif publicKey == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Public key is required\")\n\t\treturn\n\t}\n\n\t// Save public key\n\tif err := h.DB.SetSetting(\"langfuse.public_key\", publicKey); err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to save public 
key\")\n\t\treturn\n\t}\n\n\t// Save base URL\n\tif baseURL == \"\" {\n\t\tbaseURL = \"https://cloud.langfuse.com\"\n\t}\n\tif err := h.DB.SetSetting(\"langfuse.base_url\", baseURL); err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to save base URL\")\n\t\treturn\n\t}\n\n\t// Save secret key (only if provided — allows partial update)\n\tif secretKey != \"\" {\n\t\tencrypted, err := crypto.Encrypt(secretKey, h.Config.AppSecretKey)\n\t\tif err != nil {\n\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to encrypt secret key\")\n\t\t\treturn\n\t\t}\n\t\tif err := h.DB.SetSetting(\"langfuse.secret_key\", encrypted); err != nil {\n\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to save secret key\")\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Reconfigure the live client\n\tcfg, err := loadLangfuseConfig(h.DB, h.Config.AppSecretKey)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to reload config\")\n\t\treturn\n\t}\n\th.Langfuse.Reconfigure(cfg)\n\n\t// Audit log\n\tactor := \"unknown\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"langfuse.config.updated\",\n\t\tUsername: strPtr(actor),\n\t\tDetails:  strPtr(\"base_url=\" + baseURL),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"enabled\": h.Langfuse.IsEnabled(),\n\t})\n}\n\n// DeleteLangfuseConfig removes all Langfuse settings and disables the client.\nfunc (h *AdminHandler) DeleteLangfuseConfig(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\n\t_ = h.DB.DeleteSetting(\"langfuse.public_key\")\n\t_ = h.DB.DeleteSetting(\"langfuse.secret_key\")\n\t_ = h.DB.DeleteSetting(\"langfuse.base_url\")\n\n\th.Langfuse.Reconfigure(langfuse.Config{})\n\n\tactor := \"unknown\"\n\tif session != nil {\n\t\tactor = session.ClickhouseUser\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   
\"langfuse.config.deleted\",\n\t\tUsername: strPtr(actor),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"enabled\": false,\n\t})\n}\n\n// TestLangfuseConnection verifies credentials against the Langfuse API.\nfunc (h *AdminHandler) TestLangfuseConnection(w http.ResponseWriter, r *http.Request) {\n\tvar body struct {\n\t\tPublicKey string `json:\"publicKey\"`\n\t\tSecretKey string `json:\"secretKey\"`\n\t\tBaseURL   string `json:\"baseUrl\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tpublicKey := strings.TrimSpace(body.PublicKey)\n\tsecretKey := strings.TrimSpace(body.SecretKey)\n\tbaseURL := strings.TrimSpace(body.BaseURL)\n\n\t// If secret key not provided, use stored one\n\tif secretKey == \"\" {\n\t\tencryptedSecret, _ := h.DB.GetSetting(\"langfuse.secret_key\")\n\t\tif encryptedSecret != \"\" {\n\t\t\tdecrypted, err := crypto.Decrypt(encryptedSecret, h.Config.AppSecretKey)\n\t\t\tif err == nil {\n\t\t\t\tsecretKey = decrypted\n\t\t\t}\n\t\t}\n\t}\n\n\tif publicKey == \"\" || secretKey == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Public key and secret key are required\")\n\t\treturn\n\t}\n\n\tcfg := langfuse.Config{\n\t\tPublicKey: publicKey,\n\t\tSecretKey: secretKey,\n\t\tBaseURL:   baseURL,\n\t}\n\n\tif err := h.Langfuse.TestConnection(cfg); err != nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\":   true,\n\t\t\t\"connected\": false,\n\t\t\t\"error\":     err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":   true,\n\t\t\"connected\": true,\n\t})\n}\n\n// loadLangfuseConfig reads Langfuse configuration from the settings table.\nfunc loadLangfuseConfig(db *database.DB, appSecretKey string) (langfuse.Config, error) {\n\tvar cfg langfuse.Config\n\n\tpublicKey, err := 
db.GetSetting(\"langfuse.public_key\")\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tcfg.PublicKey = publicKey\n\n\tencryptedSecret, err := db.GetSetting(\"langfuse.secret_key\")\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tif encryptedSecret != \"\" {\n\t\tdecrypted, err := crypto.Decrypt(encryptedSecret, appSecretKey)\n\t\tif err != nil {\n\t\t\treturn cfg, err\n\t\t}\n\t\tcfg.SecretKey = decrypted\n\t}\n\n\tbaseURL, err := db.GetSetting(\"langfuse.base_url\")\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tcfg.BaseURL = baseURL\n\tcfg.NormalizeBaseURL()\n\n\treturn cfg, nil\n}\n"
  },
  {
    "path": "internal/server/handlers/auth.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/go-chi/chi/v5\"\n\t\"github.com/google/uuid\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n)\n\n// Session and rate-limit constants.\nconst (\n\tSessionCookie      = \"chui_session\"\n\tSessionDuration    = 7 * 24 * time.Hour\n\tRateLimitWindow    = 15 * time.Minute\n\tMaxAttemptsPerIP   = 5\n\tMaxAttemptsPerUser = 3\n)\n\n// AuthHandler implements the authentication HTTP endpoints.\ntype AuthHandler struct {\n\tDB          *database.DB\n\tGateway     *tunnel.Gateway\n\tRateLimiter *middleware.RateLimiter\n\tConfig      *config.Config\n}\n\n// Routes returns a chi.Router with all auth routes mounted.\nfunc (h *AuthHandler) Routes(r chi.Router) {\n\tr.Post(\"/login\", h.Login)\n\tr.Post(\"/logout\", h.Logout)\n\tr.Get(\"/session\", h.Session)\n\tr.Get(\"/connections\", h.Connections)\n\tr.Post(\"/switch-connection\", h.SwitchConnection)\n}\n\n// ---------- request / response types ----------\n\ntype loginRequest struct {\n\tUsername          string `json:\"username\"`\n\tPassword          string `json:\"password\"`\n\tConnectionID      string `json:\"connectionId\"`\n\tConnectionIDSnake string `json:\"connection_id\"`\n}\n\ntype switchConnectionRequest struct {\n\tConnectionID      string `json:\"connectionId\"`\n\tConnectionIDSnake string `json:\"connection_id\"`\n\tUsername          string `json:\"username\"`\n\tPassword          string `json:\"password\"`\n}\n\ntype connectionInfo struct {\n\tID         string  `json:\"id\"`\n\tName       string  `json:\"name\"`\n\tStatus     string  `json:\"status\"`\n\tOnline   
  bool    `json:\"online\"`\n\tLastSeenAt *string `json:\"last_seen_at\"`\n\tCreatedAt  string  `json:\"created_at\"`\n}\n\nfunc (r loginRequest) resolvedConnectionID() string {\n\tif id := strings.TrimSpace(r.ConnectionID); id != \"\" {\n\t\treturn id\n\t}\n\treturn strings.TrimSpace(r.ConnectionIDSnake)\n}\n\nfunc (r switchConnectionRequest) resolvedConnectionID() string {\n\tif id := strings.TrimSpace(r.ConnectionID); id != \"\" {\n\t\treturn id\n\t}\n\treturn strings.TrimSpace(r.ConnectionIDSnake)\n}\n\nfunc normalizeRateLimitUsername(username string) string {\n\treturn strings.ToLower(strings.TrimSpace(username))\n}\n\nfunc userRateLimitKey(username, connectionID string) string {\n\treturn fmt.Sprintf(\"user:%s:%s\", normalizeRateLimitUsername(username), strings.TrimSpace(connectionID))\n}\n\nfunc sanitizeClickHouseAuthMessage(raw string) string {\n\tmsg := strings.ToLower(strings.TrimSpace(raw))\n\tif msg == \"\" {\n\t\treturn \"Invalid credentials\"\n\t}\n\tif strings.Contains(msg, \"auth\") ||\n\t\tstrings.Contains(msg, \"credential\") ||\n\t\tstrings.Contains(msg, \"password\") ||\n\t\tstrings.Contains(msg, \"unauthorized\") ||\n\t\tstrings.Contains(msg, \"access denied\") {\n\t\treturn \"Invalid credentials\"\n\t}\n\tif strings.Contains(msg, \"timeout\") ||\n\t\tstrings.Contains(msg, \"deadline\") ||\n\t\tstrings.Contains(msg, \"refused\") ||\n\t\tstrings.Contains(msg, \"no route\") ||\n\t\tstrings.Contains(msg, \"connection reset\") ||\n\t\tstrings.Contains(msg, \"network\") ||\n\t\tstrings.Contains(msg, \"tls\") {\n\t\treturn \"Connection to ClickHouse failed\"\n\t}\n\treturn \"Authentication failed\"\n}\n\nfunc shouldUseSecureCookie(r *http.Request, cfg *config.Config) bool {\n\t// Direct TLS request (no proxy)\n\tif r != nil && r.TLS != nil {\n\t\treturn true\n\t}\n\t// Reverse proxy forwarding HTTPS\n\tif r != nil && strings.EqualFold(strings.TrimSpace(r.Header.Get(\"X-Forwarded-Proto\")), \"https\") {\n\t\treturn true\n\t}\n\t// Fallback to 
configured public app URL scheme.\n\tif cfg != nil && strings.TrimSpace(cfg.AppURL) != \"\" {\n\t\tif parsed, err := url.Parse(cfg.AppURL); err == nil {\n\t\t\treturn strings.EqualFold(parsed.Scheme, \"https\")\n\t\t}\n\t}\n\treturn false\n}\n\n// ---------- POST /login ----------\n\nfunc (h *AuthHandler) Login(w http.ResponseWriter, r *http.Request) {\n\tvar req loginRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\treq.Username = strings.TrimSpace(req.Username)\n\tif req.Username == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Username is required\"})\n\t\treturn\n\t}\n\treq.ConnectionID = req.resolvedConnectionID()\n\n\t// --- Rate limiting ---\n\tclientIP := getClientIP(r)\n\tipKey := fmt.Sprintf(\"ip:%s\", clientIP)\n\n\tipResult := h.RateLimiter.CheckAuthRateLimit(ipKey, \"ip\", MaxAttemptsPerIP, RateLimitWindow)\n\tif !ipResult.Allowed {\n\t\tretrySeconds := int(ipResult.RetryAfter.Seconds())\n\t\tslog.Warn(\"IP rate limited\", \"ip\", clientIP, \"retryAfter\", retrySeconds)\n\t\twriteJSON(w, http.StatusTooManyRequests, map[string]interface{}{\n\t\t\t\"error\":      \"Too many login attempts from this IP\",\n\t\t\t\"retryAfter\": retrySeconds,\n\t\t})\n\t\treturn\n\t}\n\n\t// --- Resolve connection ---\n\tconnections, err := h.DB.GetConnections()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connections\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connections\"})\n\t\treturn\n\t}\n\n\tif len(connections) == 0 {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\n\t\t\t\"error\":   \"No connections available\",\n\t\t\t\"message\": \"No connections are configured. 
Please set up an agent first.\",\n\t\t})\n\t\treturn\n\t}\n\n\tvar conn *database.Connection\n\tif req.ConnectionID != \"\" {\n\t\tfor i := range connections {\n\t\t\tif connections[i].ID == req.ConnectionID {\n\t\t\t\tconn = &connections[i]\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif conn == nil {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection not found\"})\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tconn = &connections[0]\n\t}\n\n\tuserKey := userRateLimitKey(req.Username, conn.ID)\n\tuserResult := h.RateLimiter.CheckAuthRateLimit(userKey, \"user\", MaxAttemptsPerUser, RateLimitWindow)\n\tif !userResult.Allowed {\n\t\tretrySeconds := int(userResult.RetryAfter.Seconds())\n\t\tslog.Warn(\"User rate limited\", \"user\", req.Username, \"connection\", conn.ID, \"retryAfter\", retrySeconds)\n\t\twriteJSON(w, http.StatusTooManyRequests, map[string]interface{}{\n\t\t\t\"error\":      \"Too many login attempts for this user\",\n\t\t\t\"retryAfter\": retrySeconds,\n\t\t})\n\t\treturn\n\t}\n\n\t// --- Check tunnel is online (retry up to 3 times) ---\n\tonline := false\n\tfor attempt := 0; attempt < 3; attempt++ {\n\t\tif h.Gateway.IsTunnelOnline(conn.ID) {\n\t\t\tonline = true\n\t\t\tbreak\n\t\t}\n\t\tif attempt < 2 {\n\t\t\ttime.Sleep(500 * time.Millisecond)\n\t\t}\n\t}\n\tif !online {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\n\t\t\t\"error\":   \"Connection offline\",\n\t\t\t\"message\": \"The tunnel agent for this connection is not online. 
Please check that the agent is running.\",\n\t\t})\n\t\treturn\n\t}\n\n\t// --- Test ClickHouse credentials ---\n\ttestResult, err := h.Gateway.TestConnection(conn.ID, req.Username, req.Password, 15*time.Second)\n\tif err != nil {\n\t\th.RateLimiter.RecordAttempt(ipKey, \"ip\")\n\t\th.RateLimiter.RecordAttempt(userKey, \"user\")\n\t\tslog.Info(\"Login failed: connection test error\", \"user\", req.Username, \"error\", err)\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\n\t\t\t\"error\":   \"Authentication failed\",\n\t\t\t\"message\": sanitizeClickHouseAuthMessage(err.Error()),\n\t\t})\n\t\treturn\n\t}\n\tif !testResult.Success {\n\t\th.RateLimiter.RecordAttempt(ipKey, \"ip\")\n\t\th.RateLimiter.RecordAttempt(userKey, \"user\")\n\t\terrMsg := testResult.Error\n\t\tif errMsg == \"\" {\n\t\t\terrMsg = \"Invalid credentials\"\n\t\t}\n\t\tslog.Info(\"Login failed: bad credentials\", \"user\", req.Username)\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\n\t\t\t\"error\":   \"Authentication failed\",\n\t\t\t\"message\": sanitizeClickHouseAuthMessage(errMsg),\n\t\t})\n\t\treturn\n\t}\n\n\t// --- Resolve CH-UI role ---\n\trole := h.resolveUserRole(conn.ID, req.Username, req.Password, clientIP)\n\n\t// --- Encrypt password and create session ---\n\tencryptedPwd, err := crypto.Encrypt(req.Password, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to encrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Internal server error\"})\n\t\treturn\n\t}\n\n\ttoken := uuid.NewString()\n\texpiresAt := time.Now().UTC().Add(SessionDuration).Format(time.RFC3339)\n\n\t_, err = h.DB.CreateSession(database.CreateSessionParams{\n\t\tConnectionID:      conn.ID,\n\t\tClickhouseUser:    req.Username,\n\t\tEncryptedPassword: encryptedPwd,\n\t\tToken:             token,\n\t\tExpiresAt:         expiresAt,\n\t\tUserRole:          role,\n\t})\n\tif err != nil {\n\t\tslog.Error(\"Failed to 
create session\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create session\"})\n\t\treturn\n\t}\n\n\t// --- Set cookie ---\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName:     SessionCookie,\n\t\tValue:    token,\n\t\tPath:     \"/\",\n\t\tMaxAge:   int(SessionDuration.Seconds()),\n\t\tHttpOnly: true,\n\t\tSecure:   shouldUseSecureCookie(r, h.Config),\n\t\tSameSite: http.SameSiteLaxMode,\n\t})\n\n\t// --- Reset rate limits on success ---\n\th.RateLimiter.ResetLimit(ipKey)\n\th.RateLimiter.ResetLimit(userKey)\n\n\t// --- Audit log ---\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"user.login\",\n\t\tUsername:     strPtr(req.Username),\n\t\tConnectionID: strPtr(conn.ID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"Login via connection %s (role: %s, version: %s)\", conn.Name, role, testResult.Version)),\n\t\tIPAddress:    strPtr(clientIP),\n\t})\n\n\tslog.Info(\"User logged in\", \"user\", req.Username, \"role\", role, \"connection\", conn.Name)\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":            true,\n\t\t\"user\":               req.Username,\n\t\t\"user_role\":          role,\n\t\t\"clickhouse_version\": testResult.Version,\n\t\t\"expires_at\":         expiresAt,\n\t\t\"connection\": map[string]interface{}{\n\t\t\t\"id\":     conn.ID,\n\t\t\t\"name\":   conn.Name,\n\t\t\t\"online\": true,\n\t\t},\n\t\t\"session\": map[string]interface{}{\n\t\t\t\"user\":             req.Username,\n\t\t\t\"role\":             role,\n\t\t\t\"connectionId\":     conn.ID,\n\t\t\t\"connectionName\":   conn.Name,\n\t\t\t\"connectionOnline\": true,\n\t\t\t\"expiresAt\":        expiresAt,\n\t\t\t\"version\":          testResult.Version,\n\t\t\t\"appVersion\":       version.Version,\n\t\t},\n\t})\n}\n\n// ---------- POST /logout ----------\n\nfunc (h *AuthHandler) Logout(w http.ResponseWriter, r *http.Request) {\n\tcookie, err := r.Cookie(SessionCookie)\n\tif err != nil || 
cookie.Value == \"\" {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n\t\treturn\n\t}\n\n\tsession, _ := h.DB.GetSession(cookie.Value)\n\n\tif err := h.DB.DeleteSession(cookie.Value); err != nil {\n\t\tslog.Error(\"Failed to delete session\", \"error\", err)\n\t}\n\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName:     SessionCookie,\n\t\tValue:    \"\",\n\t\tPath:     \"/\",\n\t\tMaxAge:   -1,\n\t\tHttpOnly: true,\n\t\tSecure:   shouldUseSecureCookie(r, h.Config),\n\t\tSameSite: http.SameSiteLaxMode,\n\t})\n\n\tif session != nil {\n\t\tclientIP := getClientIP(r)\n\t\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\t\tAction:    \"user.logout\",\n\t\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\t\tIPAddress: strPtr(clientIP),\n\t\t})\n\t\tslog.Info(\"User logged out\", \"user\", session.ClickhouseUser)\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// ---------- GET /session ----------\n\nfunc (h *AuthHandler) Session(w http.ResponseWriter, r *http.Request) {\n\tcookie, err := r.Cookie(SessionCookie)\n\tif err != nil || cookie.Value == \"\" {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"authenticated\": false,\n\t\t})\n\t\treturn\n\t}\n\n\tsession, err := h.DB.GetSession(cookie.Value)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get session\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Session lookup failed\"})\n\t\treturn\n\t}\n\tif session == nil {\n\t\thttp.SetCookie(w, &http.Cookie{\n\t\t\tName:     SessionCookie,\n\t\t\tValue:    \"\",\n\t\t\tPath:     \"/\",\n\t\t\tMaxAge:   -1,\n\t\t\tHttpOnly: true,\n\t\t\tSecure:   shouldUseSecureCookie(r, h.Config),\n\t\t\tSameSite: http.SameSiteLaxMode,\n\t\t})\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"authenticated\": false,\n\t\t})\n\t\treturn\n\t}\n\n\tconnOnline := h.Gateway.IsTunnelOnline(session.ConnectionID)\n\tconnName := \"\"\n\ttc, _ := 
h.DB.GetConnectionByID(session.ConnectionID)\n\tif tc != nil {\n\t\tconnName = tc.Name\n\t}\n\n\trole := \"viewer\"\n\toverrideRole, roleErr := h.DB.GetUserRole(session.ClickhouseUser)\n\tif roleErr != nil {\n\t\tslog.Warn(\"Failed to resolve explicit role for session\", \"user\", session.ClickhouseUser, \"error\", roleErr)\n\t} else if overrideRole != \"\" {\n\t\trole = overrideRole\n\t} else if session.UserRole != nil {\n\t\trole = *session.UserRole\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"authenticated\": true,\n\t\t\"user\":          session.ClickhouseUser,\n\t\t\"user_role\":     role,\n\t\t\"expires_at\":    session.ExpiresAt,\n\t\t\"connection\": map[string]interface{}{\n\t\t\t\"id\":     session.ConnectionID,\n\t\t\t\"name\":   connName,\n\t\t\t\"online\": connOnline,\n\t\t},\n\t\t\"session\": map[string]interface{}{\n\t\t\t\"user\":             session.ClickhouseUser,\n\t\t\t\"role\":             role,\n\t\t\t\"connectionId\":     session.ConnectionID,\n\t\t\t\"connectionName\":   connName,\n\t\t\t\"connectionOnline\": connOnline,\n\t\t\t\"expiresAt\":        session.ExpiresAt,\n\t\t\t\"appVersion\":       version.Version,\n\t\t},\n\t})\n}\n\n// ---------- GET /connections ----------\n\nfunc (h *AuthHandler) Connections(w http.ResponseWriter, r *http.Request) {\n\tconnections, err := h.DB.GetConnections()\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connections\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connections\"})\n\t\treturn\n\t}\n\n\tresult := make([]connectionInfo, 0, len(connections))\n\tfor _, c := range connections {\n\t\tresult = append(result, connectionInfo{\n\t\t\tID:         c.ID,\n\t\t\tName:       c.Name,\n\t\t\tStatus:     c.Status,\n\t\t\tOnline:     h.Gateway.IsTunnelOnline(c.ID),\n\t\t\tLastSeenAt: c.LastSeenAt,\n\t\t\tCreatedAt:  c.CreatedAt,\n\t\t})\n\t}\n\n\twriteJSON(w, http.StatusOK, 
map[string]interface{}{\n\t\t\"connections\": result,\n\t})\n}\n\n// ---------- POST /switch-connection ----------\n\nfunc (h *AuthHandler) SwitchConnection(w http.ResponseWriter, r *http.Request) {\n\tcookie, err := r.Cookie(SessionCookie)\n\tif err != nil || cookie.Value == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\texistingSession, err := h.DB.GetSession(cookie.Value)\n\tif err != nil || existingSession == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Session expired or invalid\"})\n\t\treturn\n\t}\n\n\tvar req switchConnectionRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\treq.ConnectionID = req.resolvedConnectionID()\n\tif strings.TrimSpace(req.Username) == \"\" {\n\t\treq.Username = existingSession.ClickhouseUser\n\t}\n\treq.Username = strings.TrimSpace(req.Username)\n\n\tif req.ConnectionID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"connection_id (or connectionId) is required\"})\n\t\treturn\n\t}\n\n\tnewConn, err := h.DB.GetConnectionByID(req.ConnectionID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connection\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connection\"})\n\t\treturn\n\t}\n\tif newConn == nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection not found\"})\n\t\treturn\n\t}\n\n\t// Check tunnel is online (retry up to 3 times).\n\tonline := false\n\tfor attempt := 0; attempt < 3; attempt++ {\n\t\tif h.Gateway.IsTunnelOnline(newConn.ID) {\n\t\t\tonline = true\n\t\t\tbreak\n\t\t}\n\t\tif attempt < 2 {\n\t\t\ttime.Sleep(500 * time.Millisecond)\n\t\t}\n\t}\n\tif !online {\n\t\twriteJSON(w, http.StatusServiceUnavailable, 
map[string]string{\n\t\t\t\"error\":   \"Connection offline\",\n\t\t\t\"message\": \"The tunnel agent for this connection is not online. Please check that the agent is running.\",\n\t\t})\n\t\treturn\n\t}\n\n\ttestResult, err := h.Gateway.TestConnection(newConn.ID, req.Username, req.Password, 15*time.Second)\n\tif err != nil {\n\t\tslog.Info(\"Switch connection failed: test error\", \"user\", req.Username, \"error\", err)\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\n\t\t\t\"error\":   \"Authentication failed\",\n\t\t\t\"message\": sanitizeClickHouseAuthMessage(err.Error()),\n\t\t})\n\t\treturn\n\t}\n\tif !testResult.Success {\n\t\terrMsg := testResult.Error\n\t\tif errMsg == \"\" {\n\t\t\terrMsg = \"Invalid credentials\"\n\t\t}\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\n\t\t\t\"error\":   \"Authentication failed\",\n\t\t\t\"message\": sanitizeClickHouseAuthMessage(errMsg),\n\t\t})\n\t\treturn\n\t}\n\n\tclientIP := getClientIP(r)\n\trole := h.resolveUserRole(newConn.ID, req.Username, req.Password, clientIP)\n\n\tencryptedPwd, err := crypto.Encrypt(req.Password, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to encrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Internal server error\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteSession(cookie.Value); err != nil {\n\t\tslog.Error(\"Failed to delete old session\", \"error\", err)\n\t}\n\n\ttoken := uuid.NewString()\n\texpiresAt := time.Now().UTC().Add(SessionDuration).Format(time.RFC3339)\n\n\t_, err = h.DB.CreateSession(database.CreateSessionParams{\n\t\tConnectionID:      newConn.ID,\n\t\tClickhouseUser:    req.Username,\n\t\tEncryptedPassword: encryptedPwd,\n\t\tToken:             token,\n\t\tExpiresAt:         expiresAt,\n\t\tUserRole:          role,\n\t})\n\tif err != nil {\n\t\tslog.Error(\"Failed to create session\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, 
map[string]string{\"error\": \"Failed to create session\"})\n\t\treturn\n\t}\n\n\thttp.SetCookie(w, &http.Cookie{\n\t\tName:     SessionCookie,\n\t\tValue:    token,\n\t\tPath:     \"/\",\n\t\tMaxAge:   int(SessionDuration.Seconds()),\n\t\tHttpOnly: true,\n\t\tSecure:   shouldUseSecureCookie(r, h.Config),\n\t\tSameSite: http.SameSiteLaxMode,\n\t})\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"user.switch_connection\",\n\t\tUsername:     strPtr(req.Username),\n\t\tConnectionID: strPtr(newConn.ID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"Switched to connection %s (role: %s)\", newConn.Name, role)),\n\t\tIPAddress:    strPtr(clientIP),\n\t})\n\n\tslog.Info(\"User switched connection\", \"user\", req.Username, \"connection\", newConn.Name, \"role\", role)\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":            true,\n\t\t\"user\":               req.Username,\n\t\t\"user_role\":          role,\n\t\t\"clickhouse_version\": testResult.Version,\n\t\t\"expires_at\":         expiresAt,\n\t\t\"connection\": map[string]interface{}{\n\t\t\t\"id\":     newConn.ID,\n\t\t\t\"name\":   newConn.Name,\n\t\t\t\"online\": true,\n\t\t},\n\t\t\"session\": map[string]interface{}{\n\t\t\t\"user\":             req.Username,\n\t\t\t\"role\":             role,\n\t\t\t\"connectionId\":     newConn.ID,\n\t\t\t\"connectionName\":   newConn.Name,\n\t\t\t\"connectionOnline\": true,\n\t\t\t\"expiresAt\":        expiresAt,\n\t\t\t\"version\":          testResult.Version,\n\t\t\t\"appVersion\":       version.Version,\n\t\t},\n\t})\n}\n\n// ---------- ClickHouse role detection ----------\n\nfunc (h *AuthHandler) detectClickHouseRole(connectionID, username, password string) string {\n\tvar err error\n\t_, err = h.Gateway.ExecuteQuery(\n\t\tconnectionID,\n\t\t\"SELECT 1 FROM system.users LIMIT 1\",\n\t\tusername, password,\n\t\t10*time.Second,\n\t)\n\tif err == nil {\n\t\tslog.Debug(\"Role detected as admin (system.users accessible)\", \"user\", 
username)\n\t\treturn \"admin\"\n\t}\n\n\terrStr := err.Error()\n\n\tif !isPermissionError(errStr) {\n\t\tslog.Debug(\"Role defaulting to viewer (non-permission error from system.users)\", \"user\", username, \"error\", errStr)\n\t\treturn \"viewer\"\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tconnectionID,\n\t\tfmt.Sprintf(\"SELECT access_type FROM system.grants WHERE user_name = '%s'\", escapeSingleQuotes(username)),\n\t\tusername, password,\n\t\t10*time.Second,\n\t)\n\tif err != nil {\n\t\tslog.Debug(\"Role defaulting to viewer (system.grants query failed)\", \"user\", username, \"error\", err)\n\t\treturn \"viewer\"\n\t}\n\n\trole := classifyGrants(result)\n\tslog.Debug(\"Role detected from grants\", \"user\", username, \"role\", role)\n\treturn role\n}\n\nfunc (h *AuthHandler) resolveUserRole(connectionID, username, password, clientIP string) string {\n\tmanualRole, err := h.DB.GetUserRole(username)\n\tif err == nil && manualRole != \"\" {\n\t\tslog.Debug(\"Using manually assigned role\", \"user\", username, \"role\", manualRole)\n\t\treturn manualRole\n\t}\n\tif err != nil {\n\t\tslog.Warn(\"Failed to read manual role override\", \"user\", username, \"error\", err)\n\t}\n\n\tdetectedRole := h.detectClickHouseRole(connectionID, username, password)\n\tif detectedRole != \"admin\" {\n\t\treturn detectedRole\n\t}\n\n\tadminCount, err := h.DB.CountUsersWithRole(\"admin\")\n\tif err != nil {\n\t\tslog.Warn(\"Failed to count admin overrides; denying implicit admin\", \"user\", username, \"error\", err)\n\t\treturn \"viewer\"\n\t}\n\n\tif adminCount == 0 {\n\t\tif err := h.DB.SetUserRole(username, \"admin\"); err != nil {\n\t\t\tslog.Warn(\"Failed to bootstrap first admin role\", \"user\", username, \"error\", err)\n\t\t\treturn \"viewer\"\n\t\t}\n\n\t\t_ = h.DB.CreateAuditLog(database.AuditLogParams{\n\t\t\tAction:       \"user_role.bootstrap_admin\",\n\t\t\tUsername:     strPtr(username),\n\t\t\tDetails:      strPtr(\"Automatically granted first explicit 
admin role from ClickHouse admin login\"),\n\t\t\tIPAddress:    strPtr(clientIP),\n\t\t\tConnectionID: strPtr(connectionID),\n\t\t})\n\n\t\tslog.Info(\"Bootstrapped first explicit admin role\", \"user\", username)\n\t\treturn \"admin\"\n\t}\n\n\tslog.Info(\"Ignoring implicit ClickHouse admin privileges without explicit CH-UI admin override\", \"user\", username)\n\treturn \"viewer\"\n}\n\nfunc classifyGrants(result *tunnel.QueryResult) string {\n\tif result == nil || len(result.Data) == 0 {\n\t\treturn \"viewer\"\n\t}\n\n\tvar rows []map[string]interface{}\n\tif err := json.Unmarshal(result.Data, &rows); err != nil {\n\t\treturn \"viewer\"\n\t}\n\n\tadminGrants := map[string]bool{\n\t\t\"ALL\": true, \"CREATE\": true, \"CREATE DATABASE\": true,\n\t\t\"CREATE TABLE\": true, \"ALTER\": true, \"DROP\": true, \"SYSTEM\": true,\n\t}\n\tanalystGrants := map[string]bool{\n\t\t\"INSERT\": true, \"DELETE\": true, \"ALTER TABLE\": true,\n\t\t\"CREATE TEMPORARY TABLE\": true,\n\t}\n\n\thasAdmin := false\n\thasAnalyst := false\n\n\tfor _, row := range rows {\n\t\taccessType, ok := row[\"access_type\"].(string)\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tupper := strings.ToUpper(strings.TrimSpace(accessType))\n\t\tif adminGrants[upper] {\n\t\t\thasAdmin = true\n\t\t}\n\t\tif analystGrants[upper] {\n\t\t\thasAnalyst = true\n\t\t}\n\t}\n\n\tif hasAdmin {\n\t\treturn \"admin\"\n\t}\n\tif hasAnalyst {\n\t\treturn \"analyst\"\n\t}\n\treturn \"viewer\"\n}\n\nfunc isPermissionError(errStr string) bool {\n\tlower := strings.ToLower(errStr)\n\treturn strings.Contains(lower, \"access_denied\") ||\n\t\tstrings.Contains(lower, \"access denied\") ||\n\t\tstrings.Contains(lower, \"not enough privileges\") ||\n\t\tstrings.Contains(lower, \"permission denied\") ||\n\t\tstrings.Contains(lower, \"code: 497\")\n}\n\nfunc escapeSingleQuotes(s string) string {\n\t// ClickHouse uses '' (doubled single-quote) to escape, not \\'\n\t// Also escape backslashes to prevent escape-sequence bypasses\n\ts = 
strings.ReplaceAll(s, \"\\\\\", \"\\\\\\\\\")\n\treturn strings.ReplaceAll(s, \"'\", \"''\")\n}\n\n// ---------- helpers ----------\n\nfunc getClientIP(r *http.Request) string {\n\t// Only trust proxy headers if the request appears to come through a reverse proxy.\n\t// Check X-Forwarded-For only when a proxy indicator is present (TLS termination or\n\t// the presence of X-Forwarded-Proto, which is typically set by trusted proxies).\n\tif r.Header.Get(\"X-Forwarded-Proto\") != \"\" || r.TLS != nil {\n\t\tif xff := r.Header.Get(\"X-Forwarded-For\"); xff != \"\" {\n\t\t\t// Take the leftmost (client) IP — the rightmost entries are added by proxies\n\t\t\tparts := strings.SplitN(xff, \",\", 2)\n\t\t\tip := strings.TrimSpace(parts[0])\n\t\t\tif ip != \"\" {\n\t\t\t\treturn ip\n\t\t\t}\n\t\t}\n\n\t\tif xri := r.Header.Get(\"X-Real-IP\"); xri != \"\" {\n\t\t\treturn strings.TrimSpace(xri)\n\t\t}\n\t}\n\n\taddr := r.RemoteAddr\n\tif idx := strings.LastIndex(addr, \":\"); idx != -1 {\n\t\treturn addr[:idx]\n\t}\n\treturn addr\n}\n"
  },
  {
    "path": "internal/server/handlers/auth_helpers_test.go",
    "content": "package handlers\n\nimport \"testing\"\n\nfunc TestUserRateLimitKeyScopedByConnection(t *testing.T) {\n\tk1 := userRateLimitKey(\"Default\", \"conn-a\")\n\tk2 := userRateLimitKey(\"default\", \"conn-b\")\n\n\tif k1 == k2 {\n\t\tt.Fatalf(\"user rate limit key must include connection scope\")\n\t}\n\n\tif k1 != \"user:default:conn-a\" {\n\t\tt.Fatalf(\"unexpected normalized key: %s\", k1)\n\t}\n}\n\nfunc TestSanitizeClickHouseAuthMessage(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\traw  string\n\t\twant string\n\t}{\n\t\t{name: \"credentials\", raw: \"Code: 516. DB::Exception: Authentication failed\", want: \"Invalid credentials\"},\n\t\t{name: \"network\", raw: \"dial tcp 127.0.0.1:8123: connection refused\", want: \"Connection to ClickHouse failed\"},\n\t\t{name: \"empty\", raw: \"\", want: \"Invalid credentials\"},\n\t\t{name: \"fallback\", raw: \"unexpected upstream response\", want: \"Authentication failed\"},\n\t}\n\n\tfor _, tc := range tests {\n\t\tt.Run(tc.name, func(t *testing.T) {\n\t\t\tgot := sanitizeClickHouseAuthMessage(tc.raw)\n\t\t\tif got != tc.want {\n\t\t\t\tt.Fatalf(\"unexpected sanitized message: got %q want %q\", got, tc.want)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "internal/server/handlers/brain.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"regexp\"\n\t\"strings\"\n\t\"time\"\n\n\tbraincore \"github.com/caioricciuti/ch-ui/internal/brain\"\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/langfuse\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n\t\"github.com/go-chi/chi/v5\"\n\t\"github.com/google/uuid\"\n)\n\nconst baseBrainPrompt = `You are Brain, an expert ClickHouse assistant for analytics teams.\n\nCore behavior:\n- Prioritize correctness over verbosity.\n- Provide SQL first when the user asks for query help.\n- Keep default queries safe: LIMIT 100 for exploratory SELECT queries.\n- Ask one concise clarification if schema/context is insufficient.\n- Reuse prior chat context and artifacts when relevant.\n\nOutput style:\n1) One sentence acknowledging intent.\n2) SQL in a fenced sql block when applicable.\n3) Short explanation and optional alternatives.`\n\n// BrainHandler handles Brain chat, persistence, and artifacts.\ntype BrainHandler struct {\n\tDB       *database.DB\n\tGateway  *tunnel.Gateway\n\tConfig   *config.Config\n\tLangfuse *langfuse.Client // nil when disabled\n}\n\nfunc (h *BrainHandler) Routes(r chi.Router) {\n\tr.Get(\"/models\", h.ListModels)\n\tr.Get(\"/skills\", h.GetSkill)\n\tr.Get(\"/chats\", h.ListChats)\n\tr.Post(\"/chats\", h.CreateChat)\n\tr.Get(\"/chats/{chatID}\", h.GetChat)\n\tr.Put(\"/chats/{chatID}\", h.UpdateChat)\n\tr.Delete(\"/chats/{chatID}\", h.DeleteChat)\n\tr.Get(\"/chats/{chatID}/messages\", h.ListMessages)\n\tr.Post(\"/chats/{chatID}/messages/stream\", h.StreamMessage)\n\tr.Get(\"/chats/{chatID}/artifacts\", h.ListArtifacts)\n\tr.Post(\"/chats/{chatID}/artifacts/query\", 
h.RunQueryArtifact)\n\n\t// Legacy endpoint kept for compatibility with older UI.\n\tr.Post(\"/chat\", h.LegacyChat)\n}\n\ntype schemaColumn struct {\n\tName string `json:\"name\"`\n\tType string `json:\"type\"`\n}\n\ntype schemaContext struct {\n\tDatabase   string         `json:\"database\"`\n\tTable      string         `json:\"table\"`\n\tColumns    []schemaColumn `json:\"columns\"`\n\tSampleData interface{}    `json:\"sampleData\"`\n}\n\ntype createChatRequest struct {\n\tTitle   string `json:\"title\"`\n\tModelID string `json:\"modelId\"`\n}\n\ntype updateChatRequest struct {\n\tTitle           *string `json:\"title\"`\n\tArchived        *bool   `json:\"archived\"`\n\tModelID         *string `json:\"modelId\"`\n\tContextDatabase *string `json:\"contextDatabase\"`\n\tContextTable    *string `json:\"contextTable\"`\n\tContextTables   *string `json:\"contextTables\"`\n}\n\ntype streamMessageRequest struct {\n\tContent        string          `json:\"content\"`\n\tModelID        string          `json:\"modelId\"`\n\tSchemaContext  *schemaContext  `json:\"schemaContext,omitempty\"`\n\tSchemaContexts []schemaContext `json:\"schemaContexts,omitempty\"`\n}\n\ntype runQueryArtifactRequest struct {\n\tQuery     string `json:\"query\"`\n\tTitle     string `json:\"title\"`\n\tMessageID string `json:\"messageId\"`\n\tTimeout   int    `json:\"timeout\"`\n}\n\nfunc (h *BrainHandler) ListModels(w http.ResponseWriter, r *http.Request) {\n\tmodels, err := h.DB.GetBrainModelsWithProvider(true)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list Brain models\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load models\")\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"models\":  models,\n\t})\n}\n\nfunc (h *BrainHandler) GetSkill(w http.ResponseWriter, r *http.Request) {\n\tskill, err := h.DB.GetActiveBrainSkill()\n\tif err != nil {\n\t\tslog.Error(\"Failed to load active Brain skill\", 
\"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load active skill\")\n\t\treturn\n\t}\n\tif skill == nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"skill\": nil})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"skill\": skill})\n}\n\nfunc (h *BrainHandler) ListChats(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tincludeArchived := strings.EqualFold(strings.TrimSpace(r.URL.Query().Get(\"includeArchived\")), \"true\")\n\tchats, err := h.DB.GetBrainChatsByUser(session.ClickhouseUser, session.ConnectionID, includeArchived)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list Brain chats\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chats\")\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"chats\":   chats,\n\t})\n}\n\nfunc (h *BrainHandler) CreateChat(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar body createChatRequest\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\ttitle := strings.TrimSpace(body.Title)\n\tif title == \"\" {\n\t\ttitle = \"New Chat\"\n\t}\n\n\tproviderID := \"\"\n\tmodelID := strings.TrimSpace(body.ModelID)\n\tif modelID != \"\" {\n\t\trt, err := h.DB.GetBrainModelRuntimeByID(modelID)\n\t\tif err != nil {\n\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to resolve model\")\n\t\t\treturn\n\t\t}\n\t\tif rt == nil || !rt.ModelActive || !rt.ProviderActive {\n\t\t\twriteError(w, http.StatusBadRequest, \"Model is not 
available\")\n\t\t\treturn\n\t\t}\n\t\tproviderID = rt.ProviderID\n\t}\n\n\tchatID, err := h.DB.CreateBrainChat(session.ClickhouseUser, session.ConnectionID, title, providerID, modelID, \"\", \"\", \"\")\n\tif err != nil {\n\t\tslog.Error(\"Failed to create Brain chat\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to create chat\")\n\t\treturn\n\t}\n\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil || chat == nil {\n\t\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"success\": true, \"id\": chatID})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"success\": true, \"chat\": chat})\n}\n\nfunc (h *BrainHandler) GetChat(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"chat\": chat})\n}\n\nfunc (h *BrainHandler) UpdateChat(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\tvar body updateChatRequest\n\tif err := json.NewDecoder(r.Body).Decode(&body); err 
!= nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\ttitle := chat.Title\n\tif body.Title != nil {\n\t\tif strings.TrimSpace(*body.Title) != \"\" {\n\t\t\ttitle = strings.TrimSpace(*body.Title)\n\t\t}\n\t}\n\n\tarchived := chat.Archived\n\tif body.Archived != nil {\n\t\tarchived = *body.Archived\n\t}\n\n\tproviderID := \"\"\n\tif chat.ProviderID != nil {\n\t\tproviderID = *chat.ProviderID\n\t}\n\tmodelID := \"\"\n\tif chat.ModelID != nil {\n\t\tmodelID = *chat.ModelID\n\t}\n\n\tif body.ModelID != nil {\n\t\tmodelID = strings.TrimSpace(*body.ModelID)\n\t\tproviderID = \"\"\n\t\tif modelID != \"\" {\n\t\t\trt, err := h.DB.GetBrainModelRuntimeByID(modelID)\n\t\t\tif err != nil {\n\t\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to resolve model\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif rt == nil || !rt.ModelActive || !rt.ProviderActive {\n\t\t\t\twriteError(w, http.StatusBadRequest, \"Model is not available\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tproviderID = rt.ProviderID\n\t\t}\n\t}\n\n\tcontextDatabase := \"\"\n\tif chat.ContextDatabase != nil {\n\t\tcontextDatabase = *chat.ContextDatabase\n\t}\n\tcontextTable := \"\"\n\tif chat.ContextTable != nil {\n\t\tcontextTable = *chat.ContextTable\n\t}\n\tcontextTables := \"\"\n\tif chat.ContextTables != nil {\n\t\tcontextTables = *chat.ContextTables\n\t}\n\tif body.ContextDatabase != nil {\n\t\tcontextDatabase = strings.TrimSpace(*body.ContextDatabase)\n\t}\n\tif body.ContextTable != nil {\n\t\tcontextTable = strings.TrimSpace(*body.ContextTable)\n\t}\n\t// If database changes but table wasn't explicitly set, clear table\n\tif body.ContextDatabase != nil && body.ContextTable == nil {\n\t\tcontextTable = \"\"\n\t}\n\tif body.ContextTables != nil {\n\t\tcontextTables = strings.TrimSpace(*body.ContextTables)\n\t\t// When using new multi-context format, clear legacy fields\n\t\tif contextTables != \"\" {\n\t\t\tcontextDatabase = \"\"\n\t\t\tcontextTable = 
\"\"\n\t\t}\n\t}\n\n\tif err := h.DB.UpdateBrainChat(chatID, title, providerID, modelID, archived, contextDatabase, contextTable, contextTables); err != nil {\n\t\tslog.Error(\"Failed to update Brain chat\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to update chat\")\n\t\treturn\n\t}\n\n\tupdated, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil || updated == nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"chat\": updated})\n}\n\nfunc (h *BrainHandler) DeleteChat(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteBrainChat(chatID); err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to delete chat\")\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *BrainHandler) ListMessages(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\tmessages, err := 
h.DB.GetBrainMessages(chatID)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load messages\")\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"messages\": messages})\n}\n\nfunc (h *BrainHandler) ListArtifacts(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\tartifacts, err := h.DB.GetBrainArtifacts(chatID)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load artifacts\")\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"artifacts\": artifacts})\n}\n\nfunc (h *BrainHandler) RunQueryArtifact(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\tvar body runQueryArtifactRequest\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(body.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\tif 
!isBrainReadOnlyQuery(query) {\n\t\twriteError(w, http.StatusBadRequest, \"Only read-only queries are allowed in Brain chat artifacts\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\ttimeout := 30 * time.Second\n\tif body.Timeout > 0 {\n\t\ttimeout = time.Duration(body.Timeout) * time.Second\n\t}\n\tif timeout > 5*time.Minute {\n\t\ttimeout = 5 * time.Minute\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(session.ConnectionID, query, session.ClickhouseUser, password, timeout)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\tartifactPayload, _ := json.Marshal(map[string]interface{}{\n\t\t\"query\":      query,\n\t\t\"data\":       json.RawMessage(result.Data),\n\t\t\"meta\":       json.RawMessage(result.Meta),\n\t\t\"statistics\": json.RawMessage(result.Stats),\n\t})\n\n\ttitle := strings.TrimSpace(body.Title)\n\tif title == \"\" {\n\t\ttitle = \"Query Result\"\n\t}\n\n\tartifactID, err := h.DB.CreateBrainArtifact(chatID, body.MessageID, \"query_result\", title, string(artifactPayload), session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to store artifact\")\n\t\treturn\n\t}\n\n\ttoolInput, _ := json.Marshal(map[string]interface{}{\"query\": query})\n\ttoolOutput, _ := json.Marshal(map[string]interface{}{\"artifact_id\": artifactID})\n\tif strings.TrimSpace(body.MessageID) != \"\" {\n\t\t_, _ = h.DB.CreateBrainToolCall(chatID, body.MessageID, \"run_readonly_query\", string(toolInput), string(toolOutput), \"success\", \"\")\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"brain.query.run\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(title),\n\t\tIPAddress:    
strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":     true,\n\t\t\"artifact_id\": artifactID,\n\t\t\"result\": map[string]interface{}{\n\t\t\t\"data\":  result.Data,\n\t\t\t\"meta\":  result.Meta,\n\t\t\t\"stats\": result.Stats,\n\t\t},\n\t})\n}\n\nfunc (h *BrainHandler) StreamMessage(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tchatID := chi.URLParam(r, \"chatID\")\n\tchat, err := h.DB.GetBrainChatByIDForUser(chatID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat\")\n\t\treturn\n\t}\n\tif chat == nil {\n\t\twriteError(w, http.StatusNotFound, \"Chat not found\")\n\t\treturn\n\t}\n\n\tvar body streamMessageRequest\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tprompt := strings.TrimSpace(body.Content)\n\tif prompt == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Message content is required\")\n\t\treturn\n\t}\n\n\tuserMessageID, err := h.DB.CreateBrainMessage(chatID, \"user\", prompt, \"complete\", \"\")\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to persist user message\")\n\t\treturn\n\t}\n\n\tassistantMessageID, err := h.DB.CreateBrainMessage(chatID, \"assistant\", \"\", \"streaming\", \"\")\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to persist assistant message\")\n\t\treturn\n\t}\n\n\truntimeModel, err := h.resolveRuntimeModel(chat, strings.TrimSpace(body.ModelID))\n\tif err != nil {\n\t\t_ = h.DB.UpdateBrainMessage(assistantMessageID, \"\", \"error\", err.Error())\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tprovider, err := braincore.NewProvider(runtimeModel.ProviderKind)\n\tif err 
!= nil {\n\t\t_ = h.DB.UpdateBrainMessage(assistantMessageID, \"\", \"error\", err.Error())\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tproviderCfg := braincore.ProviderConfig{\n\t\tKind: runtimeModel.ProviderKind,\n\t}\n\tif runtimeModel.ProviderBaseURL != nil {\n\t\tproviderCfg.BaseURL = *runtimeModel.ProviderBaseURL\n\t}\n\tif runtimeModel.ProviderEncryptedKey != nil {\n\t\tdecrypted, decErr := crypto.Decrypt(*runtimeModel.ProviderEncryptedKey, h.Config.AppSecretKey)\n\t\tif decErr != nil {\n\t\t\t_ = h.DB.UpdateBrainMessage(assistantMessageID, \"\", \"error\", \"Failed to decrypt provider API key\")\n\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt provider API key\")\n\t\t\treturn\n\t\t}\n\t\tproviderCfg.APIKey = decrypted\n\t}\n\n\thistory, err := h.DB.GetBrainMessages(chatID)\n\tif err != nil {\n\t\t_ = h.DB.UpdateBrainMessage(assistantMessageID, \"\", \"error\", \"Failed to load chat history\")\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to load chat history\")\n\t\treturn\n\t}\n\n\t// Merge single SchemaContext (legacy) with SchemaContexts array\n\tvar allContexts []schemaContext\n\tif body.SchemaContext != nil {\n\t\tallContexts = append(allContexts, *body.SchemaContext)\n\t}\n\tfor _, sc := range body.SchemaContexts {\n\t\t// Dedupe: skip if already present from legacy field\n\t\tdup := false\n\t\tfor _, existing := range allContexts {\n\t\t\tif existing.Database == sc.Database && existing.Table == sc.Table {\n\t\t\t\tdup = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !dup {\n\t\t\tallContexts = append(allContexts, sc)\n\t\t}\n\t}\n\n\tproviderMessages := make([]braincore.Message, 0, len(history)+1)\n\tsystemPrompt := h.buildSystemPrompt(allContexts)\n\tproviderMessages = append(providerMessages, braincore.Message{Role: \"system\", Content: systemPrompt})\n\n\tfor _, msg := range history {\n\t\trole := strings.TrimSpace(strings.ToLower(msg.Role))\n\t\tif role != \"user\" && role != 
\"assistant\" {\n\t\t\tcontinue\n\t\t}\n\t\tif strings.TrimSpace(msg.Content) == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif msg.Status == \"error\" {\n\t\t\tcontinue\n\t\t}\n\t\tproviderMessages = append(providerMessages, braincore.Message{Role: role, Content: msg.Content})\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"text/event-stream\")\n\tw.Header().Set(\"Cache-Control\", \"no-cache\")\n\tw.Header().Set(\"Connection\", \"keep-alive\")\n\n\tflusher, ok := w.(http.Flusher)\n\tif !ok {\n\t\t_ = h.DB.UpdateBrainMessage(assistantMessageID, \"\", \"error\", \"Streaming not supported\")\n\t\twriteError(w, http.StatusInternalServerError, \"Streaming not supported\")\n\t\treturn\n\t}\n\n\tvar built strings.Builder\n\tstreamStart := time.Now()\n\tchatResult, streamErr := provider.StreamChat(r.Context(), providerCfg, runtimeModel.ModelName, providerMessages, func(delta string) error {\n\t\tif delta == \"\" {\n\t\t\treturn nil\n\t\t}\n\t\tbuilt.WriteString(delta)\n\t\treturn writeSSE(w, flusher, map[string]interface{}{\"type\": \"delta\", \"delta\": delta, \"messageId\": assistantMessageID})\n\t})\n\tstreamEnd := time.Now()\n\n\tif streamErr != nil {\n\t\terrMessage := streamErr.Error()\n\t\tif errMessage == \"\" {\n\t\t\terrMessage = \"Unknown provider error\"\n\t\t}\n\t\tmodelParameters := brainModelParameters(chatResult, runtimeModel.ProviderKind, runtimeModel.ModelName)\n\t\t_ = h.DB.UpdateBrainMessage(assistantMessageID, built.String(), \"error\", errMessage)\n\t\t_ = writeSSE(w, flusher, map[string]interface{}{\"type\": \"error\", \"error\": errMessage, \"messageId\": assistantMessageID})\n\n\t\t// Langfuse: trace error\n\t\tif h.Langfuse.IsEnabled() {\n\t\t\ttraceID := uuid.NewString()\n\t\t\th.Langfuse.LogTrace(langfuse.TraceParams{\n\t\t\t\tID: traceID, Name: \"brain.chat\",\n\t\t\t\tUserID: session.ClickhouseUser, SessionID: chatID,\n\t\t\t\tInput: prompt, Release: version.Version,\n\t\t\t\tTags:     []string{runtimeModel.ProviderKind, runtimeModel.ModelName, 
\"brain\", \"error\"},\n\t\t\t\tMetadata: map[string]string{\"connection_id\": session.ConnectionID},\n\t\t\t})\n\t\t\th.Langfuse.LogGeneration(langfuse.GenerationParams{\n\t\t\t\tID: uuid.NewString(), TraceID: traceID, Name: \"StreamChat\",\n\t\t\t\tModel:           runtimeModel.ModelName,\n\t\t\t\tModelParameters: modelParameters,\n\t\t\t\tInput:           providerMessages, Output: errMessage,\n\t\t\t\tStartTime: streamStart, EndTime: streamEnd, Level: \"ERROR\",\n\t\t\t})\n\t\t\th.Langfuse.LogEvent(langfuse.EventParams{\n\t\t\t\tTraceID: traceID, Name: \"stream_error\",\n\t\t\t\tInput: errMessage, Level: \"ERROR\",\n\t\t\t})\n\t\t}\n\t\treturn\n\t}\n\n\tassistantText := built.String()\n\tif strings.TrimSpace(assistantText) == \"\" {\n\t\tassistantText = \"I could not generate a response for that prompt.\"\n\t}\n\n\tif err := h.DB.UpdateBrainMessage(assistantMessageID, assistantText, \"complete\", \"\"); err != nil {\n\t\tslog.Warn(\"Failed to persist assistant message\", \"error\", err)\n\t}\n\tif err := h.DB.TouchBrainChat(chatID); err != nil {\n\t\tslog.Warn(\"Failed to update chat activity\", \"error\", err)\n\t}\n\n\ttitle := chat.Title\n\tif title == \"New Chat\" || strings.TrimSpace(title) == \"\" {\n\t\ttitle = autoTitle(prompt)\n\t}\n\tstreamCtxDB := \"\"\n\tif chat.ContextDatabase != nil {\n\t\tstreamCtxDB = *chat.ContextDatabase\n\t}\n\tstreamCtxTable := \"\"\n\tif chat.ContextTable != nil {\n\t\tstreamCtxTable = *chat.ContextTable\n\t}\n\tstreamCtxTables := \"\"\n\tif chat.ContextTables != nil {\n\t\tstreamCtxTables = *chat.ContextTables\n\t}\n\tif err := h.DB.UpdateBrainChat(chatID, title, runtimeModel.ProviderID, runtimeModel.ModelID, chat.Archived, streamCtxDB, streamCtxTable, streamCtxTables); err != nil {\n\t\tslog.Warn(\"Failed to update chat title/model\", \"error\", err)\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"brain.chat\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: 
strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"chat=%s user_msg=%s\", chatID, userMessageID)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\t// Langfuse: trace + generation + auto-scores\n\tif h.Langfuse.IsEnabled() {\n\t\ttraceID := uuid.NewString()\n\t\tmodelParameters := brainModelParameters(chatResult, runtimeModel.ProviderKind, runtimeModel.ModelName)\n\t\tmetadata := map[string]string{\n\t\t\t\"connection_id\": session.ConnectionID,\n\t\t\t\"provider_kind\": runtimeModel.ProviderKind,\n\t\t}\n\t\tif streamCtxDB != \"\" {\n\t\t\tmetadata[\"schema_database\"] = streamCtxDB\n\t\t}\n\t\tif streamCtxTable != \"\" {\n\t\t\tmetadata[\"schema_table\"] = streamCtxTable\n\t\t}\n\t\th.Langfuse.LogTrace(langfuse.TraceParams{\n\t\t\tID: traceID, Name: \"brain.chat\",\n\t\t\tUserID: session.ClickhouseUser, SessionID: chatID,\n\t\t\tInput: prompt, Output: assistantText,\n\t\t\tRelease:  version.Version,\n\t\t\tTags:     []string{runtimeModel.ProviderKind, runtimeModel.ModelName, \"brain\"},\n\t\t\tMetadata: metadata,\n\t\t})\n\n\t\tgenParams := langfuse.GenerationParams{\n\t\t\tID: uuid.NewString(), TraceID: traceID, Name: \"StreamChat\",\n\t\t\tModel:           runtimeModel.ModelName,\n\t\t\tModelParameters: modelParameters,\n\t\t\tInput:           providerMessages,\n\t\t\tOutput:          assistantText,\n\t\t\tStartTime:       streamStart, EndTime: streamEnd,\n\t\t}\n\t\tif chatResult != nil && (chatResult.InputTokens > 0 || chatResult.OutputTokens > 0) {\n\t\t\tgenParams.Usage = &langfuse.Usage{\n\t\t\t\tInput:  chatResult.InputTokens,\n\t\t\t\tOutput: chatResult.OutputTokens,\n\t\t\t\tTotal:  chatResult.InputTokens + chatResult.OutputTokens,\n\t\t\t}\n\t\t}\n\t\th.Langfuse.LogGeneration(genParams)\n\n\t\t// Auto-scores\n\t\tlatencyMs := float64(streamEnd.Sub(streamStart).Milliseconds())\n\t\th.Langfuse.LogScore(langfuse.ScoreParams{\n\t\t\tTraceID: traceID, Name: \"latency_ms\", Value: latencyMs, DataType: \"NUMERIC\",\n\t\t})\n\t\thasSQLVal 
:= 0.0\n\t\tif containsSQL(assistantText) {\n\t\t\thasSQLVal = 1.0\n\t\t}\n\t\th.Langfuse.LogScore(langfuse.ScoreParams{\n\t\t\tTraceID: traceID, Name: \"has_sql\", Value: hasSQLVal, DataType: \"BOOLEAN\",\n\t\t})\n\t\tif chatResult != nil && chatResult.InputTokens > 0 {\n\t\t\tefficiency := float64(chatResult.OutputTokens) / float64(chatResult.InputTokens)\n\t\t\th.Langfuse.LogScore(langfuse.ScoreParams{\n\t\t\t\tTraceID: traceID, Name: \"token_efficiency\", Value: efficiency, DataType: \"NUMERIC\",\n\t\t\t\tComment: \"output_tokens / input_tokens\",\n\t\t\t})\n\t\t}\n\t}\n\n\t_ = writeSSE(w, flusher, map[string]interface{}{\"type\": \"done\", \"messageId\": assistantMessageID, \"chatId\": chatID})\n}\n\nfunc (h *BrainHandler) LegacyChat(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req struct {\n\t\tMessages []struct {\n\t\t\tRole    string `json:\"role\"`\n\t\t\tContent string `json:\"content\"`\n\t\t} `json:\"messages\"`\n\t\tSchemaContext *schemaContext `json:\"schemaContext,omitempty\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\tif len(req.Messages) == 0 {\n\t\twriteError(w, http.StatusBadRequest, \"Messages are required\")\n\t\treturn\n\t}\n\n\trt, err := h.DB.GetDefaultBrainModelRuntime()\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to resolve model\")\n\t\treturn\n\t}\n\tif rt == nil {\n\t\twriteError(w, http.StatusBadRequest, \"No active AI model configured by admin\")\n\t\treturn\n\t}\n\n\tprovider, err := braincore.NewProvider(rt.ProviderKind)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tcfg := braincore.ProviderConfig{Kind: rt.ProviderKind}\n\tif rt.ProviderBaseURL != nil {\n\t\tcfg.BaseURL = 
*rt.ProviderBaseURL\n\t}\n\tif rt.ProviderEncryptedKey != nil {\n\t\tdecrypted, decErr := crypto.Decrypt(*rt.ProviderEncryptedKey, h.Config.AppSecretKey)\n\t\tif decErr != nil {\n\t\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt provider API key\")\n\t\t\treturn\n\t\t}\n\t\tcfg.APIKey = decrypted\n\t}\n\n\tvar legacyContexts []schemaContext\n\tif req.SchemaContext != nil {\n\t\tlegacyContexts = append(legacyContexts, *req.SchemaContext)\n\t}\n\tmessages := make([]braincore.Message, 0, len(req.Messages)+1)\n\tmessages = append(messages, braincore.Message{Role: \"system\", Content: h.buildSystemPrompt(legacyContexts)})\n\tfor _, msg := range req.Messages {\n\t\trole := strings.ToLower(strings.TrimSpace(msg.Role))\n\t\tif role != \"user\" && role != \"assistant\" {\n\t\t\tcontinue\n\t\t}\n\t\tmessages = append(messages, braincore.Message{Role: role, Content: msg.Content})\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"text/event-stream\")\n\tw.Header().Set(\"Cache-Control\", \"no-cache\")\n\tw.Header().Set(\"Connection\", \"keep-alive\")\n\n\tflusher, ok := w.(http.Flusher)\n\tif !ok {\n\t\twriteError(w, http.StatusInternalServerError, \"Streaming not supported\")\n\t\treturn\n\t}\n\n\tvar built strings.Builder\n\tstreamStart := time.Now()\n\tchatResult, streamErr := provider.StreamChat(r.Context(), cfg, rt.ModelName, messages, func(delta string) error {\n\t\tbuilt.WriteString(delta)\n\t\treturn writeSSE(w, flusher, map[string]interface{}{\"type\": \"delta\", \"delta\": delta})\n\t})\n\tstreamEnd := time.Now()\n\n\tif streamErr != nil {\n\t\tmodelParameters := brainModelParameters(chatResult, rt.ProviderKind, rt.ModelName)\n\t\t_ = writeSSE(w, flusher, map[string]interface{}{\"type\": \"error\", \"error\": streamErr.Error()})\n\n\t\t// Langfuse: trace legacy error\n\t\tif h.Langfuse.IsEnabled() {\n\t\t\ttraceID := uuid.NewString()\n\t\t\th.Langfuse.LogTrace(langfuse.TraceParams{\n\t\t\t\tID: traceID, Name: \"brain.legacy_chat\",\n\t\t\t\tUserID: 
session.ClickhouseUser, Release: version.Version,\n\t\t\t\tTags: []string{rt.ProviderKind, rt.ModelName, \"brain\", \"legacy\", \"error\"},\n\t\t\t})\n\t\t\th.Langfuse.LogGeneration(langfuse.GenerationParams{\n\t\t\t\tID: uuid.NewString(), TraceID: traceID, Name: \"StreamChat\",\n\t\t\t\tModel: rt.ModelName, Input: messages, Output: streamErr.Error(),\n\t\t\t\tModelParameters: modelParameters,\n\t\t\t\tStartTime:       streamStart, EndTime: streamEnd, Level: \"ERROR\",\n\t\t\t})\n\t\t}\n\t\treturn\n\t}\n\n\t// Langfuse: trace legacy success\n\tif h.Langfuse.IsEnabled() {\n\t\ttraceID := uuid.NewString()\n\t\tmodelParameters := brainModelParameters(chatResult, rt.ProviderKind, rt.ModelName)\n\t\th.Langfuse.LogTrace(langfuse.TraceParams{\n\t\t\tID: traceID, Name: \"brain.legacy_chat\",\n\t\t\tUserID: session.ClickhouseUser, Release: version.Version,\n\t\t\tTags: []string{rt.ProviderKind, rt.ModelName, \"brain\", \"legacy\"},\n\t\t})\n\t\tgenParams := langfuse.GenerationParams{\n\t\t\tID: uuid.NewString(), TraceID: traceID, Name: \"StreamChat\",\n\t\t\tModel:           rt.ModelName,\n\t\t\tModelParameters: modelParameters,\n\t\t\tInput:           messages, Output: built.String(),\n\t\t\tStartTime: streamStart, EndTime: streamEnd,\n\t\t}\n\t\tif chatResult != nil && (chatResult.InputTokens > 0 || chatResult.OutputTokens > 0) {\n\t\t\tgenParams.Usage = &langfuse.Usage{\n\t\t\t\tInput:  chatResult.InputTokens,\n\t\t\t\tOutput: chatResult.OutputTokens,\n\t\t\t\tTotal:  chatResult.InputTokens + chatResult.OutputTokens,\n\t\t\t}\n\t\t}\n\t\th.Langfuse.LogGeneration(genParams)\n\n\t\tlatencyMs := float64(streamEnd.Sub(streamStart).Milliseconds())\n\t\th.Langfuse.LogScore(langfuse.ScoreParams{\n\t\t\tTraceID: traceID, Name: \"latency_ms\", Value: latencyMs, DataType: \"NUMERIC\",\n\t\t})\n\t}\n\n\t_ = writeSSE(w, flusher, map[string]interface{}{\"type\": \"done\"})\n}\n\nfunc brainModelParameters(result *braincore.ChatResult, providerKind, model string) map[string]interface{} 
{\n\tif result != nil && result.ModelParameters != nil {\n\t\treturn result.ModelParameters\n\t}\n\treturn braincore.DefaultModelParameters(providerKind, model)\n}\n\nfunc (h *BrainHandler) resolveRuntimeModel(chat *database.BrainChat, requestedModelID string) (*database.BrainModelRuntime, error) {\n\tif requestedModelID != \"\" {\n\t\trt, err := h.DB.GetBrainModelRuntimeByID(requestedModelID)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to resolve model\")\n\t\t}\n\t\tif rt == nil || !rt.ModelActive || !rt.ProviderActive {\n\t\t\treturn nil, fmt.Errorf(\"selected model is not active\")\n\t\t}\n\t\treturn rt, nil\n\t}\n\n\tif chat != nil && chat.ModelID != nil && strings.TrimSpace(*chat.ModelID) != \"\" {\n\t\trt, err := h.DB.GetBrainModelRuntimeByID(*chat.ModelID)\n\t\tif err == nil && rt != nil && rt.ModelActive && rt.ProviderActive {\n\t\t\treturn rt, nil\n\t\t}\n\t}\n\n\trt, err := h.DB.GetDefaultBrainModelRuntime()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to load default model\")\n\t}\n\tif rt == nil {\n\t\treturn nil, fmt.Errorf(\"no active AI model configured by admin\")\n\t}\n\treturn rt, nil\n}\n\nfunc (h *BrainHandler) buildSystemPrompt(contexts []schemaContext) string {\n\tskillPrompt := \"\"\n\tskill, err := h.DB.GetActiveBrainSkill()\n\tif err == nil && skill != nil {\n\t\tskillPrompt = strings.TrimSpace(skill.Content)\n\t}\n\n\tprompt := baseBrainPrompt\n\tif skillPrompt != \"\" {\n\t\tprompt += \"\\n\\nActive skills:\\n\" + skillPrompt\n\t}\n\tif len(contexts) > 0 {\n\t\tprompt += buildMultiSchemaPrompt(contexts)\n\t}\n\treturn prompt\n}\n\nfunc buildMultiSchemaPrompt(contexts []schemaContext) string {\n\tvar sb strings.Builder\n\tsb.WriteString(\"\\n\\nSchema context:\\n\")\n\tfor i, sc := range contexts {\n\t\tif i > 0 {\n\t\t\tsb.WriteString(\"\\n\")\n\t\t}\n\t\tlabel := \"\"\n\t\tif sc.Database != \"\" && sc.Table != \"\" {\n\t\t\tlabel = sc.Database + \".\" + sc.Table\n\t\t} else if sc.Database != \"\" 
{\n\t\t\tlabel = sc.Database\n\t\t} else if sc.Table != \"\" {\n\t\t\tlabel = sc.Table\n\t\t}\n\t\tsb.WriteString(fmt.Sprintf(\"Table %d: %s\\n\", i+1, label))\n\t\tif len(sc.Columns) > 0 {\n\t\t\tsb.WriteString(\"Columns:\\n\")\n\t\t\tfor _, col := range sc.Columns {\n\t\t\t\tsb.WriteString(\"- \" + col.Name + \" (\" + col.Type + \")\\n\")\n\t\t\t}\n\t\t}\n\t}\n\treturn sb.String()\n}\n\nfunc writeSSE(w http.ResponseWriter, flusher http.Flusher, payload map[string]interface{}) error {\n\tb, err := json.Marshal(payload)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := fmt.Fprintf(w, \"data: %s\\n\\n\", b); err != nil {\n\t\treturn err\n\t}\n\tflusher.Flush()\n\treturn nil\n}\n\nfunc autoTitle(prompt string) string {\n\tt := strings.TrimSpace(prompt)\n\tif t == \"\" {\n\t\treturn \"New Chat\"\n\t}\n\tif len(t) <= 48 {\n\t\treturn t\n\t}\n\treturn strings.TrimSpace(t[:48]) + \"...\"\n}\n\nfunc isBrainReadOnlyQuery(query string) bool {\n\tre := regexp.MustCompile(`(?is)^\\s*(SELECT|WITH|SHOW|DESC|DESCRIBE|EXPLAIN)\\b`)\n\treturn re.MatchString(query)\n}\n\nvar sqlBlockPattern = regexp.MustCompile(\"(?i)```sql\")\n\nfunc containsSQL(text string) bool {\n\treturn sqlBlockPattern.MatchString(text)\n}\n"
  },
  {
    "path": "internal/server/handlers/connections.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/license\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// ConnectionsHandler handles connection management routes.\ntype ConnectionsHandler struct {\n\tDB      *database.DB\n\tGateway *tunnel.Gateway\n\tConfig  *config.Config\n}\n\n// connectionResponse extends Connection with live status information.\ntype connectionResponse struct {\n\tdatabase.Connection\n\tOnline   bool       `json:\"online\"`\n\tLastSeen *time.Time `json:\"last_seen,omitempty\"`\n\tHostInfo any        `json:\"host_info,omitempty\"`\n}\n\n// List returns all connections.\n// GET /\nfunc (h *ConnectionsHandler) List(w http.ResponseWriter, r *http.Request) {\n\tconns, err := h.DB.GetConnections()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list connections\", \"error\", err)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connections\"})\n\t\treturn\n\t}\n\n\tresults := make([]connectionResponse, 0, len(conns))\n\tfor _, c := range conns {\n\t\tresults = append(results, h.buildConnectionResponse(c))\n\t}\n\n\tconnJSON(w, http.StatusOK, results)\n}\n\n// Get returns a single connection by ID.\n// GET /{id}\nfunc (h *ConnectionsHandler) Get(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection ID is required\"})\n\t\treturn\n\t}\n\n\tconn, err := h.DB.GetConnectionByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connection\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, 
map[string]string{\"error\": \"Failed to retrieve connection\"})\n\t\treturn\n\t}\n\tif conn == nil {\n\t\tconnJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Connection not found\"})\n\t\treturn\n\t}\n\n\tconnJSON(w, http.StatusOK, h.buildConnectionResponse(*conn))\n}\n\n// Create creates a new connection.\n// POST /\nfunc (h *ConnectionsHandler) Create(w http.ResponseWriter, r *http.Request) {\n\tvar body struct {\n\t\tName string `json:\"name\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection name is required\"})\n\t\treturn\n\t}\n\n\ttoken := license.GenerateTunnelToken()\n\n\tid, err := h.DB.CreateConnection(name, token, false)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create connection\", \"error\", err)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create connection\"})\n\t\treturn\n\t}\n\n\tsession := middleware.GetSession(r)\n\tvar username *string\n\tif session != nil {\n\t\tusername = strPtr(session.ClickhouseUser)\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"connection.created\",\n\t\tUsername:     username,\n\t\tConnectionID: strPtr(id),\n\t\tDetails:      strPtr(fmt.Sprintf(\"Created connection %q\", name)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\tconn, err := h.DB.GetConnectionByID(id)\n\tif err != nil || conn == nil {\n\t\tslog.Error(\"Failed to retrieve created connection\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Connection created but failed to retrieve\"})\n\t\treturn\n\t}\n\n\tconnJSON(w, http.StatusCreated, map[string]interface{}{\n\t\t\"connection\":         conn,\n\t\t\"tunnel_token\":       
token,\n\t\t\"setup_instructions\": getSetupInstructions(token),\n\t})\n}\n\n// Delete deletes a connection by ID.\n// DELETE /{id}\nfunc (h *ConnectionsHandler) Delete(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection ID is required\"})\n\t\treturn\n\t}\n\n\tconn, err := h.DB.GetConnectionByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connection for deletion\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connection\"})\n\t\treturn\n\t}\n\tif conn == nil {\n\t\tconnJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Connection not found\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteConnection(id); err != nil {\n\t\tslog.Error(\"Failed to delete connection\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete connection\"})\n\t\treturn\n\t}\n\n\tsession := middleware.GetSession(r)\n\tvar username *string\n\tif session != nil {\n\t\tusername = strPtr(session.ClickhouseUser)\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"connection.deleted\",\n\t\tUsername:     username,\n\t\tConnectionID: strPtr(id),\n\t\tDetails:      strPtr(fmt.Sprintf(\"Deleted connection %q\", conn.Name)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\tconnJSON(w, http.StatusOK, map[string]string{\"message\": \"Connection deleted successfully\"})\n}\n\n// TestConnection tests a ClickHouse connection through the tunnel.\n// POST /{id}/test\nfunc (h *ConnectionsHandler) TestConnection(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection ID is required\"})\n\t\treturn\n\t}\n\n\tconn, err := h.DB.GetConnectionByID(id)\n\tif err != nil 
{\n\t\tslog.Error(\"Failed to get connection for test\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connection\"})\n\t\treturn\n\t}\n\tif conn == nil {\n\t\tconnJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Connection not found\"})\n\t\treturn\n\t}\n\n\tif !h.Gateway.IsTunnelOnline(id) {\n\t\tconnJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\": false,\n\t\t\t\"error\":   \"Tunnel is not connected. Please ensure the agent is running.\",\n\t\t})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tUsername string `json:\"username\"`\n\t\tPassword string `json:\"password\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tusername := strings.TrimSpace(body.Username)\n\tpassword := body.Password\n\tif username == \"\" {\n\t\tusername = \"default\"\n\t}\n\n\tresult, err := h.Gateway.TestConnection(id, username, password, 15*time.Second)\n\tif err != nil {\n\t\tslog.Warn(\"Connection test failed\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\": false,\n\t\t\t\"error\":   err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\tconnJSON(w, http.StatusOK, result)\n}\n\n// GetToken returns the tunnel token for a connection.\n// GET /{id}/token\nfunc (h *ConnectionsHandler) GetToken(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection ID is required\"})\n\t\treturn\n\t}\n\n\tconn, err := h.DB.GetConnectionByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connection for token\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connection\"})\n\t\treturn\n\t}\n\tif conn == nil 
{\n\t\tconnJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Connection not found\"})\n\t\treturn\n\t}\n\n\tconnJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"tunnel_token\":       conn.TunnelToken,\n\t\t\"setup_instructions\": getSetupInstructions(conn.TunnelToken),\n\t})\n}\n\n// RegenerateToken generates a new tunnel token for a connection.\n// POST /{id}/regenerate-token\nfunc (h *ConnectionsHandler) RegenerateToken(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\tconnJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Connection ID is required\"})\n\t\treturn\n\t}\n\n\tconn, err := h.DB.GetConnectionByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get connection for token regeneration\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve connection\"})\n\t\treturn\n\t}\n\tif conn == nil {\n\t\tconnJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Connection not found\"})\n\t\treturn\n\t}\n\n\tnewToken := license.GenerateTunnelToken()\n\n\tif err := h.DB.UpdateConnectionToken(id, newToken); err != nil {\n\t\tslog.Error(\"Failed to regenerate token\", \"error\", err, \"id\", id)\n\t\tconnJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to regenerate token\"})\n\t\treturn\n\t}\n\n\tsession := middleware.GetSession(r)\n\tvar username *string\n\tif session != nil {\n\t\tusername = strPtr(session.ClickhouseUser)\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"connection.token_regenerated\",\n\t\tUsername:     username,\n\t\tConnectionID: strPtr(id),\n\t\tDetails:      strPtr(fmt.Sprintf(\"Regenerated token for connection %q\", conn.Name)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\tconnJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"tunnel_token\":       newToken,\n\t\t\"setup_instructions\": 
getSetupInstructions(newToken),\n\t\t\"message\":            \"Token regenerated successfully. The previous token is now invalid.\",\n\t})\n}\n\n// buildConnectionResponse enriches a Connection with live status from the gateway.\nfunc (h *ConnectionsHandler) buildConnectionResponse(c database.Connection) connectionResponse {\n\tresp := connectionResponse{\n\t\tConnection: c,\n\t}\n\n\tonline, lastSeen := h.Gateway.GetTunnelStatus(c.ID)\n\tresp.Online = online\n\tif online && !lastSeen.IsZero() {\n\t\tresp.LastSeen = &lastSeen\n\t}\n\n\tif c.HostInfoJSON != nil && *c.HostInfoJSON != \"\" {\n\t\tvar hostInfo database.HostInfo\n\t\tif err := json.Unmarshal([]byte(*c.HostInfoJSON), &hostInfo); err == nil {\n\t\t\tresp.HostInfo = hostInfo\n\t\t}\n\t}\n\n\treturn resp\n}\n\n// getSetupInstructions returns setup instructions for connecting a tunnel.\nfunc getSetupInstructions(token string) map[string]string {\n\treturn map[string]string{\n\t\t\"connect\": fmt.Sprintf(\"ch-ui connect --url <YOUR_SERVER_URL>/connect --key %s\", token),\n\t\t\"service\": fmt.Sprintf(\"ch-ui service install --url <YOUR_SERVER_URL>/connect --key %s\", token),\n\t}\n}\n\n// connJSON writes a JSON response.\nfunc connJSON(w http.ResponseWriter, status int, v interface{}) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(status)\n\tjson.NewEncoder(w).Encode(v)\n}\n"
  },
  {
    "path": "internal/server/handlers/dashboards.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/queryproc\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// DashboardsHandler handles dashboard and panel CRUD operations.\ntype DashboardsHandler struct {\n\tDB      *database.DB\n\tGateway *tunnel.Gateway\n\tConfig  *config.Config\n}\n\n// Routes returns a chi.Router with all dashboard and panel routes mounted.\nfunc (h *DashboardsHandler) Routes() chi.Router {\n\tr := chi.NewRouter()\n\n\tr.Get(\"/\", h.ListDashboards)\n\tr.Post(\"/\", h.CreateDashboard)\n\tr.Post(\"/query\", h.ExecutePanelQuery)\n\n\tr.Route(\"/{id}\", func(r chi.Router) {\n\t\tr.Get(\"/\", h.GetDashboard)\n\t\tr.Put(\"/\", h.UpdateDashboard)\n\t\tr.Delete(\"/\", h.DeleteDashboard)\n\n\t\t// Panel CRUD\n\t\tr.Post(\"/panels\", h.CreatePanel)\n\t\tr.Put(\"/panels/{panelId}\", h.UpdatePanel)\n\t\tr.Delete(\"/panels/{panelId}\", h.DeletePanel)\n\t})\n\n\treturn r\n}\n\n// ---------- Dashboard CRUD ----------\n\n// ListDashboards returns all dashboards.\nfunc (h *DashboardsHandler) ListDashboards(w http.ResponseWriter, r *http.Request) {\n\tif err := h.DB.EnsureSystemOverviewDashboard(); err != nil {\n\t\tslog.Warn(\"Failed to ensure default system dashboard\", \"error\", err)\n\t}\n\n\tdashboards, err := h.DB.GetDashboards()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list dashboards\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list dashboards\"})\n\t\treturn\n\t}\n\n\tif dashboards == nil {\n\t\tdashboards = []database.Dashboard{}\n\t}\n\n\twriteJSON(w, http.StatusOK, 
map[string]interface{}{\"dashboards\": dashboards})\n}\n\n// GetDashboard returns a single dashboard with all its panels.\nfunc (h *DashboardsHandler) GetDashboard(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Dashboard ID is required\"})\n\t\treturn\n\t}\n\n\tdashboard, err := h.DB.GetDashboardByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get dashboard\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get dashboard\"})\n\t\treturn\n\t}\n\tif dashboard == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Dashboard not found\"})\n\t\treturn\n\t}\n\n\tpanels, err := h.DB.GetPanelsByDashboard(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get panels\", \"error\", err, \"dashboard\", id)\n\t\tpanels = []database.Panel{}\n\t}\n\tif panels == nil {\n\t\tpanels = []database.Panel{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"dashboard\": dashboard,\n\t\t\"panels\":    panels,\n\t})\n}\n\n// CreateDashboard creates a new dashboard.\nfunc (h *DashboardsHandler) CreateDashboard(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName        string `json:\"name\"`\n\t\tDescription string `json:\"description\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is required\"})\n\t\treturn\n\t}\n\n\tid, err := h.DB.CreateDashboard(name, 
strings.TrimSpace(body.Description), session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create dashboard\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create dashboard\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"dashboard.created\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tdashboard, err := h.DB.GetDashboardByID(id)\n\tif err != nil || dashboard == nil {\n\t\twriteJSON(w, http.StatusCreated, map[string]string{\"id\": id})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"dashboard\": dashboard})\n}\n\n// UpdateDashboard partially updates a dashboard.\nfunc (h *DashboardsHandler) UpdateDashboard(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Dashboard ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetDashboardByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get dashboard for update\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get dashboard\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Dashboard not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName        *string `json:\"name\"`\n\t\tDescription *string `json:\"description\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tdescription := \"\"\n\tif existing.Description != nil 
{\n\t\tdescription = *existing.Description\n\t}\n\n\tchanged := false\n\tif body.Name != nil {\n\t\tn := strings.TrimSpace(*body.Name)\n\t\tif n == \"\" {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name cannot be empty\"})\n\t\t\treturn\n\t\t}\n\t\tname = n\n\t\tchanged = true\n\t}\n\tif body.Description != nil {\n\t\tdescription = strings.TrimSpace(*body.Description)\n\t\tchanged = true\n\t}\n\n\tif !changed {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"No fields to update\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdateDashboard(id, name, description); err != nil {\n\t\tslog.Error(\"Failed to update dashboard\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update dashboard\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"dashboard.updated\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tdashboard, err := h.DB.GetDashboardByID(id)\n\tif err != nil || dashboard == nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"dashboard\": dashboard})\n}\n\n// DeleteDashboard deletes a dashboard and all its panels.\nfunc (h *DashboardsHandler) DeleteDashboard(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Dashboard ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetDashboardByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get dashboard for delete\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": 
\"Failed to get dashboard\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Dashboard not found\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteDashboard(id); err != nil {\n\t\tslog.Error(\"Failed to delete dashboard\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete dashboard\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"dashboard.deleted\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(existing.Name),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// ---------- Panel CRUD ----------\n\n// CreatePanel creates a new panel in a dashboard.\nfunc (h *DashboardsHandler) CreatePanel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdashboardID := chi.URLParam(r, \"id\")\n\tif dashboardID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Dashboard ID is required\"})\n\t\treturn\n\t}\n\n\tdashboard, err := h.DB.GetDashboardByID(dashboardID)\n\tif err != nil || dashboard == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Dashboard not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         string `json:\"name\"`\n\t\tPanelType    string `json:\"panel_type\"`\n\t\tQuery        string `json:\"query\"`\n\t\tConnectionID string `json:\"connection_id\"`\n\t\tConfig       string `json:\"config\"`\n\t\tLayoutX      *int   `json:\"layout_x\"`\n\t\tLayoutY      *int   `json:\"layout_y\"`\n\t\tLayoutW      *int   `json:\"layout_w\"`\n\t\tLayoutH      *int   `json:\"layout_h\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, 
map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is required\"})\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(body.Query)\n\tif query == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query is required\"})\n\t\treturn\n\t}\n\n\tpanelType := strings.TrimSpace(body.PanelType)\n\tif panelType == \"\" {\n\t\tpanelType = \"table\"\n\t}\n\n\tconnectionID := strings.TrimSpace(body.ConnectionID)\n\tpanelConfig := strings.TrimSpace(body.Config)\n\n\tx, y, w2, h2 := 0, 0, 6, 4\n\tif body.LayoutX != nil {\n\t\tx = *body.LayoutX\n\t}\n\tif body.LayoutY != nil {\n\t\ty = *body.LayoutY\n\t}\n\tif body.LayoutW != nil {\n\t\tw2 = *body.LayoutW\n\t}\n\tif body.LayoutH != nil {\n\t\th2 = *body.LayoutH\n\t}\n\n\tid, err := h.DB.CreatePanel(dashboardID, name, panelType, query, connectionID, panelConfig, x, y, w2, h2)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create panel\", \"error\", err, \"dashboard\", dashboardID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create panel\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"panel.created\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tpanel, err := h.DB.GetPanelByID(id)\n\tif err != nil || panel == nil {\n\t\twriteJSON(w, http.StatusCreated, map[string]string{\"id\": id})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"panel\": panel})\n}\n\n// UpdatePanel partially updates a panel.\nfunc (h *DashboardsHandler) UpdatePanel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdashboardID := chi.URLParam(r, 
\"id\")\n\tpanelID := chi.URLParam(r, \"panelId\")\n\tif dashboardID == \"\" || panelID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Dashboard ID and panel ID are required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetPanelByID(panelID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get panel for update\", \"error\", err, \"panel\", panelID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get panel\"})\n\t\treturn\n\t}\n\tif existing == nil || existing.DashboardID != dashboardID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Panel not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         *string `json:\"name\"`\n\t\tPanelType    *string `json:\"panel_type\"`\n\t\tQuery        *string `json:\"query\"`\n\t\tConnectionID *string `json:\"connection_id\"`\n\t\tConfig       *string `json:\"config\"`\n\t\tLayoutX      *int    `json:\"layout_x\"`\n\t\tLayoutY      *int    `json:\"layout_y\"`\n\t\tLayoutW      *int    `json:\"layout_w\"`\n\t\tLayoutH      *int    `json:\"layout_h\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tpanelType := existing.PanelType\n\tquery := existing.Query\n\tconnectionID := \"\"\n\tif existing.ConnectionID != nil {\n\t\tconnectionID = *existing.ConnectionID\n\t}\n\tpanelConfig := existing.Config\n\tx, y, pw, ph := existing.LayoutX, existing.LayoutY, existing.LayoutW, existing.LayoutH\n\n\tchanged := false\n\tif body.Name != nil {\n\t\tname = strings.TrimSpace(*body.Name)\n\t\tchanged = true\n\t}\n\tif body.PanelType != nil {\n\t\tpanelType = strings.TrimSpace(*body.PanelType)\n\t\tchanged = true\n\t}\n\tif body.Query != nil {\n\t\tquery = strings.TrimSpace(*body.Query)\n\t\tchanged = true\n\t}\n\tif body.ConnectionID != nil {\n\t\tconnectionID = 
strings.TrimSpace(*body.ConnectionID)\n\t\tchanged = true\n\t}\n\tif body.Config != nil {\n\t\tpanelConfig = *body.Config\n\t\tchanged = true\n\t}\n\tif body.LayoutX != nil {\n\t\tx = *body.LayoutX\n\t\tchanged = true\n\t}\n\tif body.LayoutY != nil {\n\t\ty = *body.LayoutY\n\t\tchanged = true\n\t}\n\tif body.LayoutW != nil {\n\t\tpw = *body.LayoutW\n\t\tchanged = true\n\t}\n\tif body.LayoutH != nil {\n\t\tph = *body.LayoutH\n\t\tchanged = true\n\t}\n\n\tif !changed {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"No fields to update\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdatePanel(panelID, name, panelType, query, connectionID, panelConfig, x, y, pw, ph); err != nil {\n\t\tslog.Error(\"Failed to update panel\", \"error\", err, \"panel\", panelID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update panel\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"panel.updated\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tpanel, err := h.DB.GetPanelByID(panelID)\n\tif err != nil || panel == nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"panel\": panel})\n}\n\n// DeletePanel deletes a panel from a dashboard.\nfunc (h *DashboardsHandler) DeletePanel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdashboardID := chi.URLParam(r, \"id\")\n\tpanelID := chi.URLParam(r, \"panelId\")\n\tif dashboardID == \"\" || panelID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Dashboard ID and panel ID are required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetPanelByID(panelID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to 
get panel for delete\", \"error\", err, \"panel\", panelID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get panel\"})\n\t\treturn\n\t}\n\tif existing == nil || existing.DashboardID != dashboardID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Panel not found\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeletePanel(panelID); err != nil {\n\t\tslog.Error(\"Failed to delete panel\", \"error\", err, \"panel\", panelID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete panel\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"panel.deleted\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(existing.Name),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// ---------- Panel Query Execution ----------\n\n// ExecutePanelQuery executes a SQL query through the tunnel for a panel.\nfunc (h *DashboardsHandler) ExecutePanelQuery(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tQuery         string               `json:\"query\"`\n\t\tTimeout       *int                 `json:\"timeout\"`\n\t\tTimeRange     *queryproc.TimeRange `json:\"time_range\"`\n\t\tTimeField     string               `json:\"time_field\"`\n\t\tTimeFieldUnit string               `json:\"time_field_unit\"`\n\t\tMaxDataPoints *int                 `json:\"max_data_points\"`\n\t\tTable         string               `json:\"table\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(body.Query)\n\tif query == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, 
map[string]string{\"error\": \"Query is required\"})\n\t\treturn\n\t}\n\n\tmaxDataPoints := 1000\n\tif body.MaxDataPoints != nil && *body.MaxDataPoints > 0 {\n\t\tmaxDataPoints = *body.MaxDataPoints\n\t}\n\n\tprocessed := queryproc.ProcessQueryVariables(queryproc.ProcessorOptions{\n\t\tQuery:         query,\n\t\tTimeRange:     body.TimeRange,\n\t\tTimeField:     strings.TrimSpace(body.TimeField),\n\t\tTimeFieldUnit: strings.TrimSpace(body.TimeFieldUnit),\n\t\tMaxDataPoints: maxDataPoints,\n\t\tTable:         strings.TrimSpace(body.Table),\n\t})\n\n\tif len(processed.Errors) > 0 {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]interface{}{\n\t\t\t\"success\": false,\n\t\t\t\"error\":   strings.Join(processed.Errors, \"; \"),\n\t\t})\n\t\treturn\n\t}\n\n\tquery = strings.TrimSpace(processed.Query)\n\tif query == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Processed query is empty\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\ttimeout := 30 * time.Second\n\tif body.Timeout != nil && *body.Timeout > 0 {\n\t\ttimeout = time.Duration(*body.Timeout) * time.Second\n\t\tif timeout > 5*time.Minute {\n\t\t\ttimeout = 5 * time.Minute\n\t\t}\n\t}\n\n\tstart := time.Now()\n\tresult, err := h.Gateway.ExecuteQuery(session.ConnectionID, query, session.ClickhouseUser, password, timeout)\n\telapsed := time.Since(start)\n\n\tif err != nil {\n\t\tslog.Warn(\"Panel query failed\", \"error\", err, \"user\", session.ClickhouseUser)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]interface{}{\n\t\t\t\"success\":    false,\n\t\t\t\"error\":      err.Error(),\n\t\t\t\"elapsed_ms\": elapsed.Milliseconds(),\n\t\t})\n\t\treturn\n\t}\n\n\trows := 
countRows(result.Data)\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":    true,\n\t\t\"data\":       result.Data,\n\t\t\"meta\":       result.Meta,\n\t\t\"statistics\": result.Stats,\n\t\t\"rows\":       rows,\n\t\t\"elapsed_ms\": elapsed.Milliseconds(),\n\t\t\"query\":      query,\n\t\t\"variables\":  processed.InterpolatedVars,\n\t})\n}\n"
  },
  {
    "path": "internal/server/handlers/governance.go",
    "content": "package handlers\n\nimport (\n\t\"context\"\n\t\"database/sql\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/governance\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n\t\"github.com/go-chi/chi/v5\"\n)\n\n// GovernanceHandler handles all governance-related HTTP endpoints.\ntype GovernanceHandler struct {\n\tDB      *database.DB\n\tGateway *tunnel.Gateway\n\tConfig  *config.Config\n\tStore   *governance.Store\n\tSyncer  *governance.Syncer\n}\n\n// Routes returns a chi.Router with all governance routes mounted.\nfunc (h *GovernanceHandler) Routes() chi.Router {\n\tr := chi.NewRouter()\n\n\t// Overview & Sync\n\tr.Get(\"/overview\", h.GetOverview)\n\tr.Post(\"/sync\", h.TriggerSync)\n\tr.Post(\"/sync/{type}\", h.TriggerSingleSync)\n\tr.Get(\"/sync/status\", h.GetSyncStatus)\n\n\t// Metadata\n\tr.Get(\"/databases\", h.ListDatabases)\n\tr.Get(\"/tables\", h.ListTables)\n\tr.Get(\"/tables/{db}/{table}\", h.GetTableDetail)\n\tr.Get(\"/tables/{db}/{table}/notes\", h.ListTableNotes)\n\tr.Get(\"/tables/{db}/{table}/columns/{column}/notes\", h.ListColumnNotes)\n\tr.With(middleware.RequireAdmin(h.DB)).Post(\"/tables/{db}/{table}/notes\", h.CreateTableNote)\n\tr.With(middleware.RequireAdmin(h.DB)).Post(\"/tables/{db}/{table}/columns/{column}/notes\", h.CreateColumnNote)\n\tr.With(middleware.RequireAdmin(h.DB)).Delete(\"/notes/{id}\", h.DeleteObjectNote)\n\tr.With(middleware.RequireAdmin(h.DB)).Put(\"/tables/{db}/{table}/comment\", h.UpdateTableComment)\n\tr.With(middleware.RequireAdmin(h.DB)).Put(\"/tables/{db}/{table}/columns/{column}/comment\", h.UpdateColumnComment)\n\tr.Get(\"/schema-changes\", h.ListSchemaChanges)\n\n\t// 
Query Log\n\tr.Get(\"/query-log\", h.ListQueryLog)\n\tr.Get(\"/query-log/top\", h.TopQueries)\n\tr.Get(\"/query-log/{query_id}\", h.GetQueryByQueryID)\n\n\t// Lineage\n\tr.Get(\"/lineage\", h.GetLineage)\n\tr.Get(\"/lineage/graph\", h.GetLineageGraph)\n\n\t// View dependency graph (structural lineage from MV/View definitions)\n\tr.Get(\"/view-graph\", h.GetViewGraph)\n\n\t// Tags\n\tr.Get(\"/tags\", h.ListTags)\n\tr.Post(\"/tags\", h.CreateTag)\n\tr.Delete(\"/tags/{id}\", h.DeleteTag)\n\n\t// Access\n\tr.Route(\"/access\", func(ar chi.Router) {\n\t\tar.Get(\"/users\", h.ListChUsers)\n\t\tar.With(middleware.RequireAdmin(h.DB)).Post(\"/users\", h.CreateChUser)\n\t\tar.With(middleware.RequireAdmin(h.DB)).Delete(\"/users/{name}\", h.DeleteChUser)\n\t\tar.Get(\"/roles\", h.ListChRoles)\n\t\tar.Get(\"/matrix\", h.GetAccessMatrix)\n\t\tar.Get(\"/over-permissions\", h.GetOverPermissions)\n\t})\n\n\t// Policies\n\tr.Route(\"/policies\", func(pr chi.Router) {\n\t\tpr.With(middleware.RequireAdmin(h.DB)).Get(\"/\", h.ListPolicies)\n\t\tpr.With(middleware.RequireAdmin(h.DB)).Post(\"/\", h.CreatePolicy)\n\t\tpr.With(middleware.RequireAdmin(h.DB)).Get(\"/{id}\", h.GetPolicy)\n\t\tpr.With(middleware.RequireAdmin(h.DB)).Put(\"/{id}\", h.UpdatePolicy)\n\t\tpr.With(middleware.RequireAdmin(h.DB)).Delete(\"/{id}\", h.DeletePolicy)\n\t})\n\n\t// Violations\n\tr.Get(\"/violations\", h.ListViolations)\n\tr.With(middleware.RequireAdmin(h.DB)).Post(\"/violations/{id}/incident\", h.CreateIncidentFromViolation)\n\n\t// Incidents\n\tr.Get(\"/incidents\", h.ListIncidents)\n\tr.Get(\"/incidents/{id}\", h.GetIncident)\n\tr.With(middleware.RequireAdmin(h.DB)).Post(\"/incidents\", h.CreateIncident)\n\tr.With(middleware.RequireAdmin(h.DB)).Put(\"/incidents/{id}\", h.UpdateIncident)\n\tr.Get(\"/incidents/{id}/comments\", h.ListIncidentComments)\n\tr.With(middleware.RequireAdmin(h.DB)).Post(\"/incidents/{id}/comments\", h.CreateIncidentComment)\n\n\t// Audit logs\n\tr.Get(\"/audit-logs\", 
h.GetAuditLogs)\n\n\t// ClickHouse query log\n\tr.Get(\"/clickhouse-query-log\", h.GetClickHouseQueryLog)\n\n\t// Alerts management\n\tr.Route(\"/alerts\", func(ar chi.Router) {\n\t\tar.Get(\"/channels\", h.ListAlertChannels)\n\t\tar.Post(\"/channels\", h.CreateAlertChannel)\n\t\tar.Put(\"/channels/{id}\", h.UpdateAlertChannel)\n\t\tar.Delete(\"/channels/{id}\", h.DeleteAlertChannel)\n\t\tar.Post(\"/channels/{id}/test\", h.TestAlertChannel)\n\t\tar.Get(\"/rules\", h.ListAlertRules)\n\t\tar.Post(\"/rules\", h.CreateAlertRule)\n\t\tar.Put(\"/rules/{id}\", h.UpdateAlertRule)\n\t\tar.Delete(\"/rules/{id}\", h.DeleteAlertRule)\n\t\tar.Get(\"/events\", h.ListAlertEvents)\n\t})\n\n\treturn r\n}\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) getCredentials(r *http.Request) (*governance.CHCredentials, error) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\treturn nil, fmt.Errorf(\"not authenticated\")\n\t}\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to decrypt credentials: %w\", err)\n\t}\n\treturn &governance.CHCredentials{\n\t\tConnectionID: session.ConnectionID,\n\t\tUser:         session.ClickhouseUser,\n\t\tPassword:     password,\n\t}, nil\n}\n\nfunc (h *GovernanceHandler) executeClickHouseSQL(creds *governance.CHCredentials, sql string) error {\n\t_, err := h.Gateway.ExecuteQuery(creds.ConnectionID, sql, creds.User, creds.Password, 30*time.Second)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"execute clickhouse query: %w\", err)\n\t}\n\treturn nil\n}\n\nfunc (h *GovernanceHandler) triggerSyncAsync(creds governance.CHCredentials, syncType governance.SyncType) {\n\tif !h.DB.GovernanceSyncEnabled() {\n\t\treturn\n\t}\n\tgo func() {\n\t\tctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)\n\t\tdefer cancel()\n\t\tif err := h.Syncer.SyncSingle(ctx, creds, syncType); err != 
nil {\n\t\t\tslog.Warn(\"Governance async sync failed\", \"connection\", creds.ConnectionID, \"type\", syncType, \"error\", err)\n\t\t}\n\t}()\n}\n\nfunc (h *GovernanceHandler) connectionID(r *http.Request) string {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\treturn \"\"\n\t}\n\treturn session.ConnectionID\n}\n\nfunc queryInt(r *http.Request, key string, defaultVal int) int {\n\tv := r.URL.Query().Get(key)\n\tif v == \"\" {\n\t\treturn defaultVal\n\t}\n\tn, err := strconv.Atoi(v)\n\tif err != nil {\n\t\treturn defaultVal\n\t}\n\treturn n\n}\n\nfunc queryIntBounded(r *http.Request, key string, defaultVal, minVal, maxVal int) int {\n\tn := queryInt(r, key, defaultVal)\n\tif n < minVal {\n\t\treturn minVal\n\t}\n\tif n > maxVal {\n\t\treturn maxVal\n\t}\n\treturn n\n}\n\n// ── Overview & Sync ──────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) GetOverview(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\toverview, err := h.Store.GetOverview(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get governance overview\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get overview\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"overview\": overview})\n}\n\nfunc (h *GovernanceHandler) TriggerSync(w http.ResponseWriter, r *http.Request) {\n\tif !h.DB.GovernanceSyncEnabled() {\n\t\twriteJSON(w, http.StatusConflict, map[string]string{\n\t\t\t\"error\": \"governance_sync_disabled\",\n\t\t\t\"hint\":  \"Enable governance sync in Governance → Settings before triggering a sync.\",\n\t\t})\n\t\treturn\n\t}\n\n\tcreds, err := h.getCredentials(r)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": 
err.Error()})\n\t\treturn\n\t}\n\n\tresult, err := h.Syncer.SyncConnection(r.Context(), *creds)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusConflict, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tsession := middleware.GetSession(r)\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.sync\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(\"full sync triggered\"),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"result\": result})\n}\n\nfunc (h *GovernanceHandler) TriggerSingleSync(w http.ResponseWriter, r *http.Request) {\n\tif !h.DB.GovernanceSyncEnabled() {\n\t\twriteJSON(w, http.StatusConflict, map[string]string{\n\t\t\t\"error\": \"governance_sync_disabled\",\n\t\t\t\"hint\":  \"Enable governance sync in Governance → Settings before triggering a sync.\",\n\t\t})\n\t\treturn\n\t}\n\n\tsyncType := governance.SyncType(chi.URLParam(r, \"type\"))\n\tif syncType != governance.SyncMetadata && syncType != governance.SyncQueryLog && syncType != governance.SyncAccess {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid sync type. 
Use: metadata, query_log, access\"})\n\t\treturn\n\t}\n\n\tcreds, err := h.getCredentials(r)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tif err := h.Syncer.SyncSingle(r.Context(), *creds, syncType); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) GetSyncStatus(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tstates, err := h.Store.GetSyncStates(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get sync status\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get sync status\"})\n\t\treturn\n\t}\n\tif states == nil {\n\t\tstates = []governance.SyncState{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"sync_states\": states})\n}\n\n// ── Metadata ─────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) ListDatabases(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdatabases, err := h.Store.GetDatabases(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list governance databases\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list databases\"})\n\t\treturn\n\t}\n\tif databases == nil {\n\t\tdatabases = []governance.GovDatabase{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"databases\": databases})\n}\n\nfunc (h *GovernanceHandler) ListTables(w http.ResponseWriter, r *http.Request) 
{\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdbFilter := r.URL.Query().Get(\"database\")\n\tvar tables []governance.GovTable\n\tvar err error\n\n\tif dbFilter != \"\" {\n\t\ttables, err = h.Store.GetTablesByDatabase(connID, dbFilter)\n\t} else {\n\t\ttables, err = h.Store.GetTables(connID)\n\t}\n\tif err != nil {\n\t\tslog.Error(\"Failed to list governance tables\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list tables\"})\n\t\treturn\n\t}\n\tif tables == nil {\n\t\ttables = []governance.GovTable{}\n\t}\n\n\t// Enrich with tags\n\tfor i := range tables {\n\t\ttags, _ := h.Store.GetTagsForTable(connID, tables[i].DatabaseName, tables[i].TableName)\n\t\ttagNames := make([]string, 0)\n\t\tfor _, t := range tags {\n\t\t\tif t.ObjectType == \"table\" {\n\t\t\t\ttagNames = append(tagNames, t.Tag)\n\t\t\t}\n\t\t}\n\t\ttables[i].Tags = tagNames\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"tables\": tables, \"total\": len(tables)})\n}\n\nfunc (h *GovernanceHandler) GetTableDetail(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdbName := chi.URLParam(r, \"db\")\n\ttableName := chi.URLParam(r, \"table\")\n\n\ttable, err := h.Store.GetTableByName(connID, dbName, tableName)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get table detail\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get table\"})\n\t\treturn\n\t}\n\tif table == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Table not found\"})\n\t\treturn\n\t}\n\n\t// Get columns\n\tcolumns, _ := h.Store.GetColumns(connID, dbName, tableName)\n\tif columns == nil 
{\n\t\tcolumns = []governance.GovColumn{}\n\t}\n\n\t// Enrich columns with tags\n\tfor i := range columns {\n\t\tcolTags, _ := h.Store.GetTagsForColumn(connID, dbName, tableName, columns[i].ColumnName)\n\t\ttagNames := make([]string, 0)\n\t\tfor _, t := range colTags {\n\t\t\ttagNames = append(tagNames, t.Tag)\n\t\t}\n\t\tcolumns[i].Tags = tagNames\n\t}\n\n\t// Get table tags\n\ttableTags, _ := h.Store.GetTagsForTable(connID, dbName, tableName)\n\ttagNames := make([]string, 0)\n\tfor _, t := range tableTags {\n\t\tif t.ObjectType == \"table\" {\n\t\t\ttagNames = append(tagNames, t.Tag)\n\t\t}\n\t}\n\ttable.Tags = tagNames\n\n\t// Get recent queries\n\tqueries, _, _ := h.Store.GetQueryLog(connID, 20, 0, \"\", dbName+\".\"+tableName)\n\n\t// Get lineage\n\tupstream, downstream, _ := h.Store.GetLineageForTable(connID, dbName, tableName)\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"table\":          table,\n\t\t\"columns\":        columns,\n\t\t\"tags\":           tableTags,\n\t\t\"queries\":        queries,\n\t\t\"recent_queries\": queries,\n\t\t\"upstream\":       upstream,\n\t\t\"downstream\":     downstream,\n\t})\n}\n\nfunc (h *GovernanceHandler) UpdateTableComment(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tcreds, err := h.getCredentials(r)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tdbName := strings.TrimSpace(chi.URLParam(r, \"db\"))\n\ttableName := strings.TrimSpace(chi.URLParam(r, \"table\"))\n\tif dbName == \"\" || tableName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Database and table are required\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tComment string `json:\"comment\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != 
nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tsql := fmt.Sprintf(\n\t\t\"ALTER TABLE %s.%s MODIFY COMMENT '%s'\",\n\t\tescapeIdentifier(dbName),\n\t\tescapeIdentifier(tableName),\n\t\tescapeLiteral(body.Comment),\n\t)\n\tif err := h.executeClickHouseSQL(creds, sql); err != nil {\n\t\tslog.Error(\"Failed to update table comment\", \"connection\", session.ConnectionID, \"db\", dbName, \"table\", tableName, \"error\", err)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.table.comment.updated\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"%s.%s\", dbName, tableName)),\n\t})\n\n\th.triggerSyncAsync(*creds, governance.SyncMetadata)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) UpdateColumnComment(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tcreds, err := h.getCredentials(r)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tdbName := strings.TrimSpace(chi.URLParam(r, \"db\"))\n\ttableName := strings.TrimSpace(chi.URLParam(r, \"table\"))\n\tcolumnName := strings.TrimSpace(chi.URLParam(r, \"column\"))\n\tif dbName == \"\" || tableName == \"\" || columnName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Database, table, and column are required\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tComment string `json:\"comment\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, 
http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tsql := fmt.Sprintf(\n\t\t\"ALTER TABLE %s.%s COMMENT COLUMN %s '%s'\",\n\t\tescapeIdentifier(dbName),\n\t\tescapeIdentifier(tableName),\n\t\tescapeIdentifier(columnName),\n\t\tescapeLiteral(body.Comment),\n\t)\n\tif err := h.executeClickHouseSQL(creds, sql); err != nil {\n\t\tslog.Error(\"Failed to update column comment\", \"connection\", session.ConnectionID, \"db\", dbName, \"table\", tableName, \"column\", columnName, \"error\", err)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.column.comment.updated\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"%s.%s.%s\", dbName, tableName, columnName)),\n\t})\n\n\th.triggerSyncAsync(*creds, governance.SyncMetadata)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) ListTableNotes(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tdbName := strings.TrimSpace(chi.URLParam(r, \"db\"))\n\ttableName := strings.TrimSpace(chi.URLParam(r, \"table\"))\n\tif dbName == \"\" || tableName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Database and table are required\"})\n\t\treturn\n\t}\n\tnotes, err := h.Store.ListObjectComments(connID, \"table\", dbName, tableName, \"\", 200)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list table notes\", \"connection\", connID, \"db\", dbName, \"table\", tableName, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list table 
notes\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"notes\": notes})\n}\n\nfunc (h *GovernanceHandler) ListColumnNotes(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tdbName := strings.TrimSpace(chi.URLParam(r, \"db\"))\n\ttableName := strings.TrimSpace(chi.URLParam(r, \"table\"))\n\tcolumnName := strings.TrimSpace(chi.URLParam(r, \"column\"))\n\tif dbName == \"\" || tableName == \"\" || columnName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Database, table and column are required\"})\n\t\treturn\n\t}\n\tnotes, err := h.Store.ListObjectComments(connID, \"column\", dbName, tableName, columnName, 200)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list column notes\", \"connection\", connID, \"db\", dbName, \"table\", tableName, \"column\", columnName, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list column notes\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"notes\": notes})\n}\n\nfunc (h *GovernanceHandler) CreateTableNote(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tdbName := strings.TrimSpace(chi.URLParam(r, \"db\"))\n\ttableName := strings.TrimSpace(chi.URLParam(r, \"table\"))\n\tif dbName == \"\" || tableName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Database and table are required\"})\n\t\treturn\n\t}\n\tvar body struct {\n\t\tCommentText string `json:\"comment_text\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request 
body\"})\n\t\treturn\n\t}\n\tcommentText := strings.TrimSpace(body.CommentText)\n\tif commentText == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"comment_text is required\"})\n\t\treturn\n\t}\n\tif len(commentText) > 4000 {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"comment_text must be <= 4000 characters\"})\n\t\treturn\n\t}\n\tid, err := h.Store.CreateObjectComment(session.ConnectionID, \"table\", dbName, tableName, \"\", commentText, session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create table note\", \"connection\", session.ConnectionID, \"db\", dbName, \"table\", tableName, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create table note\"})\n\t\treturn\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.table.note.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"%s.%s\", dbName, tableName)),\n\t})\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": id, \"success\": true})\n}\n\nfunc (h *GovernanceHandler) CreateColumnNote(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tdbName := strings.TrimSpace(chi.URLParam(r, \"db\"))\n\ttableName := strings.TrimSpace(chi.URLParam(r, \"table\"))\n\tcolumnName := strings.TrimSpace(chi.URLParam(r, \"column\"))\n\tif dbName == \"\" || tableName == \"\" || columnName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Database, table and column are required\"})\n\t\treturn\n\t}\n\tvar body struct {\n\t\tCommentText string `json:\"comment_text\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, 
http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\tcommentText := strings.TrimSpace(body.CommentText)\n\tif commentText == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"comment_text is required\"})\n\t\treturn\n\t}\n\tif len(commentText) > 4000 {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"comment_text must be <= 4000 characters\"})\n\t\treturn\n\t}\n\tid, err := h.Store.CreateObjectComment(session.ConnectionID, \"column\", dbName, tableName, columnName, commentText, session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create column note\", \"connection\", session.ConnectionID, \"db\", dbName, \"table\", tableName, \"column\", columnName, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create column note\"})\n\t\treturn\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.column.note.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"%s.%s.%s\", dbName, tableName, columnName)),\n\t})\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": id, \"success\": true})\n}\n\nfunc (h *GovernanceHandler) DeleteObjectNote(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tid := strings.TrimSpace(chi.URLParam(r, \"id\"))\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"id is required\"})\n\t\treturn\n\t}\n\tif err := h.Store.DeleteObjectComment(session.ConnectionID, id); err != nil {\n\t\tif err == sql.ErrNoRows {\n\t\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Note not 
found\"})\n\t\t\treturn\n\t\t}\n\t\tslog.Error(\"Failed to delete object note\", \"connection\", session.ConnectionID, \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete note\"})\n\t\treturn\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.object.note.deleted\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(id),\n\t})\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) ListSchemaChanges(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tlimit := queryIntBounded(r, \"limit\", 50, 1, 500)\n\tchanges, err := h.Store.GetSchemaChanges(connID, limit)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list schema changes\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list schema changes\"})\n\t\treturn\n\t}\n\tif changes == nil {\n\t\tchanges = []governance.SchemaChange{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"changes\": changes})\n}\n\n// ── Query Log ────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) ListQueryLog(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tlimit := queryIntBounded(r, \"limit\", 100, 1, 5000)\n\toffset := queryIntBounded(r, \"offset\", 0, 0, 1000000)\n\tuser := r.URL.Query().Get(\"user\")\n\ttable := r.URL.Query().Get(\"table\")\n\n\tentries, total, err := h.Store.GetQueryLog(connID, limit, offset, user, table)\n\tif err != nil {\n\t\tslog.Error(\"Failed to 
list query log\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list query log\"})\n\t\treturn\n\t}\n\tif entries == nil {\n\t\tentries = []governance.QueryLogEntry{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"entries\": entries, \"total\": total})\n}\n\nfunc (h *GovernanceHandler) TopQueries(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tlimit := queryIntBounded(r, \"limit\", 20, 1, 200)\n\ttop, err := h.Store.GetTopQueries(connID, limit)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get top queries\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get top queries\"})\n\t\treturn\n\t}\n\tif top == nil {\n\t\ttop = []map[string]interface{}{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"queries\": top, \"top_queries\": top})\n}\n\n// ── Lineage ──────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) GetLineage(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdbName := r.URL.Query().Get(\"database\")\n\ttableName := r.URL.Query().Get(\"table\")\n\tif dbName == \"\" || tableName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"database and table query params required\"})\n\t\treturn\n\t}\n\n\tupstream, downstream, err := h.Store.GetLineageForTable(connID, dbName, tableName)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get lineage\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get lineage\"})\n\t\treturn\n\t}\n\tif upstream == nil {\n\t\tupstream = 
[]governance.LineageEdge{}\n\t}\n\tif downstream == nil {\n\t\tdownstream = []governance.LineageEdge{}\n\t}\n\n\t// Build graph representation\n\tnodeMap := make(map[string]governance.LineageNode)\n\tcurrentKey := dbName + \".\" + tableName\n\tnodeMap[currentKey] = governance.LineageNode{\n\t\tID: currentKey, Database: dbName, Table: tableName, Type: \"current\",\n\t}\n\n\tfor _, e := range upstream {\n\t\tkey := e.SourceDatabase + \".\" + e.SourceTable\n\t\tif _, ok := nodeMap[key]; !ok {\n\t\t\tnodeMap[key] = governance.LineageNode{\n\t\t\t\tID: key, Database: e.SourceDatabase, Table: e.SourceTable, Type: \"source\",\n\t\t\t}\n\t\t}\n\t}\n\tfor _, e := range downstream {\n\t\tkey := e.TargetDatabase + \".\" + e.TargetTable\n\t\tif _, ok := nodeMap[key]; !ok {\n\t\t\tnodeMap[key] = governance.LineageNode{\n\t\t\t\tID: key, Database: e.TargetDatabase, Table: e.TargetTable, Type: \"target\",\n\t\t\t}\n\t\t}\n\t}\n\n\tnodes := make([]governance.LineageNode, 0, len(nodeMap))\n\tfor _, n := range nodeMap {\n\t\tnodes = append(nodes, n)\n\t}\n\n\tallEdges := append(upstream, downstream...)\n\n\t// Enrich: include_columns=true attaches column metadata to nodes and column edges to edges\n\tif r.URL.Query().Get(\"include_columns\") == \"true\" {\n\t\tenrichLineageNodes(h.Store, connID, nodes)\n\t\tenrichLineageEdges(h.Store, allEdges)\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"graph\": governance.LineageGraph{Nodes: nodes, Edges: allEdges},\n\t})\n}\n\nfunc (h *GovernanceHandler) GetLineageGraph(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tedges, err := h.Store.GetFullLineageGraph(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get lineage graph\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get lineage 
graph\"})\n\t\treturn\n\t}\n\tif edges == nil {\n\t\tedges = []governance.LineageEdge{}\n\t}\n\n\t// Build nodes from edges\n\tnodeMap := make(map[string]governance.LineageNode)\n\tfor _, e := range edges {\n\t\tsrcKey := e.SourceDatabase + \".\" + e.SourceTable\n\t\tif _, ok := nodeMap[srcKey]; !ok {\n\t\t\tnodeMap[srcKey] = governance.LineageNode{\n\t\t\t\tID: srcKey, Database: e.SourceDatabase, Table: e.SourceTable, Type: \"source\",\n\t\t\t}\n\t\t}\n\t\ttgtKey := e.TargetDatabase + \".\" + e.TargetTable\n\t\tif _, ok := nodeMap[tgtKey]; !ok {\n\t\t\tnodeMap[tgtKey] = governance.LineageNode{\n\t\t\t\tID: tgtKey, Database: e.TargetDatabase, Table: e.TargetTable, Type: \"target\",\n\t\t\t}\n\t\t}\n\t}\n\n\tnodes := make([]governance.LineageNode, 0, len(nodeMap))\n\tfor _, n := range nodeMap {\n\t\tnodes = append(nodes, n)\n\t}\n\n\t// Enrich: include_columns=true attaches column metadata to nodes and column edges to edges\n\tif r.URL.Query().Get(\"include_columns\") == \"true\" {\n\t\tenrichLineageNodes(h.Store, connID, nodes)\n\t\tenrichLineageEdges(h.Store, edges)\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"graph\": governance.LineageGraph{Nodes: nodes, Edges: edges},\n\t})\n}\n\n// enrichLineageNodes attaches column metadata from gov_columns to each node.\nfunc enrichLineageNodes(store *governance.Store, connID string, nodes []governance.LineageNode) {\n\tfor i := range nodes {\n\t\tcols, err := store.GetColumns(connID, nodes[i].Database, nodes[i].Table)\n\t\tif err != nil {\n\t\t\tslog.Warn(\"Failed to get columns for lineage node\", \"node\", nodes[i].ID, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\t\tnodes[i].Columns = cols\n\t}\n}\n\n// enrichLineageEdges attaches column-level lineage edges to each table-level edge.\nfunc enrichLineageEdges(store *governance.Store, edges []governance.LineageEdge) {\n\tedgeIDs := make([]string, 0, len(edges))\n\tfor _, e := range edges {\n\t\tedgeIDs = append(edgeIDs, e.ID)\n\t}\n\n\tcolEdgeMap, err := 
store.GetColumnEdgesForEdgeIDs(edgeIDs)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to get column lineage edges\", \"error\", err)\n\t\treturn\n\t}\n\n\tfor i := range edges {\n\t\tif colEdges, ok := colEdgeMap[edges[i].ID]; ok {\n\t\t\tedges[i].ColumnEdges = colEdges\n\t\t}\n\t}\n}\n\n// GetQueryByQueryID returns a single query log entry by ClickHouse query_id.\nfunc (h *GovernanceHandler) GetQueryByQueryID(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tqueryID := chi.URLParam(r, \"query_id\")\n\tif queryID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"query_id is required\"})\n\t\treturn\n\t}\n\n\tentry, err := h.Store.GetQueryByQueryID(connID, queryID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Query not found\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"entry\": entry})\n}\n\n// ── Tags ─────────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) ListTags(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdbName := r.URL.Query().Get(\"database\")\n\ttableName := r.URL.Query().Get(\"table\")\n\n\tvar tags []governance.TagEntry\n\tvar err error\n\n\tif dbName != \"\" && tableName != \"\" {\n\t\ttags, err = h.Store.GetTagsForTable(connID, dbName, tableName)\n\t} else {\n\t\ttags, err = h.Store.GetTags(connID)\n\t}\n\tif err != nil {\n\t\tslog.Error(\"Failed to list tags\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list tags\"})\n\t\treturn\n\t}\n\tif tags == nil {\n\t\ttags = []governance.TagEntry{}\n\t}\n\n\twriteJSON(w, 
http.StatusOK, map[string]interface{}{\"tags\": tags})\n}\n\nfunc (h *GovernanceHandler) CreateTag(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tObjectType   string `json:\"object_type\"`\n\t\tDatabaseName string `json:\"database_name\"`\n\t\tTableName    string `json:\"table_name\"`\n\t\tColumnName   string `json:\"column_name\"`\n\t\tTag          string `json:\"tag\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\ttag := governance.SensitivityTag(strings.ToUpper(body.Tag))\n\tif !governance.ValidTags[tag] {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid tag. Valid: PII, FINANCIAL, INTERNAL, PUBLIC, CRITICAL\"})\n\t\treturn\n\t}\n\n\tif body.ObjectType != \"table\" && body.ObjectType != \"column\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"object_type must be 'table' or 'column'\"})\n\t\treturn\n\t}\n\n\tif body.DatabaseName == \"\" || body.TableName == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"database_name and table_name are required\"})\n\t\treturn\n\t}\n\n\tid, err := h.Store.CreateTag(\n\t\tsession.ConnectionID, body.ObjectType, body.DatabaseName, body.TableName,\n\t\tbody.ColumnName, tag, session.ClickhouseUser,\n\t)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create tag\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create tag\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.tag.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      
strPtr(fmt.Sprintf(\"%s on %s.%s\", tag, body.DatabaseName, body.TableName)),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": id})\n}\n\nfunc (h *GovernanceHandler) DeleteTag(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Tag ID required\"})\n\t\treturn\n\t}\n\n\tif err := h.Store.DeleteTag(id); err != nil {\n\t\tslog.Error(\"Failed to delete tag\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete tag\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.tag.deleted\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"tag %s deleted\", id)),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// ── Access ───────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) ListChUsers(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tusers, err := h.Store.GetChUsers(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list CH users\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list users\"})\n\t\treturn\n\t}\n\tif users == nil {\n\t\tusers = []governance.ChUser{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"users\": users})\n}\n\nfunc (h *GovernanceHandler) CreateChUser(w http.ResponseWriter, r *http.Request) {\n\tsession := 
middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tcreds, err := h.getCredentials(r)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         string   `json:\"name\"`\n\t\tPassword     string   `json:\"password\"`\n\t\tAuthType     string   `json:\"auth_type\"`\n\t\tDefaultRoles []string `json:\"default_roles\"`\n\t\tIfNotExists  *bool    `json:\"if_not_exists\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\n\tauthType := strings.TrimSpace(strings.ToLower(body.AuthType))\n\tif authType == \"\" {\n\t\tif strings.TrimSpace(body.Password) == \"\" {\n\t\t\tauthType = \"no_password\"\n\t\t} else {\n\t\t\tauthType = \"plaintext_password\"\n\t\t}\n\t}\n\n\tswitch authType {\n\tcase \"no_password\", \"plaintext_password\", \"sha256_password\", \"double_sha1_password\":\n\tdefault:\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"auth_type must be one of: no_password, plaintext_password, sha256_password, double_sha1_password\"})\n\t\treturn\n\t}\n\tif authType != \"no_password\" && strings.TrimSpace(body.Password) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"password is required for selected auth_type\"})\n\t\treturn\n\t}\n\n\tallRoles, roleNames, parseErr := parseDefaultRolesInput(body.DefaultRoles)\n\tif parseErr != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": parseErr.Error()})\n\t\treturn\n\t}\n\n\tvar createSQL 
strings.Builder\n\tcreateSQL.WriteString(\"CREATE USER \")\n\tif body.IfNotExists == nil || *body.IfNotExists {\n\t\tcreateSQL.WriteString(\"IF NOT EXISTS \")\n\t}\n\tcreateSQL.WriteString(escapeIdentifier(name))\n\tcreateSQL.WriteString(buildClickHouseCreateAuthClause(authType, body.Password))\n\n\tif err := h.executeClickHouseSQL(creds, createSQL.String()); err != nil {\n\t\tslog.Error(\"Failed to create ClickHouse user\", \"connection\", session.ConnectionID, \"name\", name, \"error\", err)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tescapedRoles := make([]string, 0, len(roleNames))\n\tfor _, role := range roleNames {\n\t\tescapedRoles = append(escapedRoles, escapeIdentifier(role))\n\t}\n\tif len(escapedRoles) > 0 {\n\t\tgrantSQL := \"GRANT \" + strings.Join(escapedRoles, \", \") + \" TO \" + escapeIdentifier(name)\n\t\tif err := h.executeClickHouseSQL(creds, grantSQL); err != nil {\n\t\t\tslog.Error(\"ClickHouse user created but role grant failed\", \"connection\", session.ConnectionID, \"name\", name, \"error\", err)\n\t\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"user created but failed to grant roles: %v\", err)})\n\t\t\treturn\n\t\t}\n\t}\n\tif allRoles || len(escapedRoles) > 0 {\n\t\tdefaultRoleClause := \"ALL\"\n\t\tif !allRoles {\n\t\t\tdefaultRoleClause = strings.Join(escapedRoles, \", \")\n\t\t}\n\t\talterSQL := \"ALTER USER \" + escapeIdentifier(name) + \" DEFAULT ROLE \" + defaultRoleClause\n\t\tif err := h.executeClickHouseSQL(creds, alterSQL); err != nil {\n\t\t\tslog.Error(\"ClickHouse user created but default role assignment failed\", \"connection\", session.ConnectionID, \"name\", name, \"error\", err)\n\t\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": fmt.Sprintf(\"user created but failed to set default role: %v\", err)})\n\t\t\treturn\n\t\t}\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       
\"governance.access.user.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(name),\n\t})\n\n\th.triggerSyncAsync(*creds, governance.SyncAccess)\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"success\": true, \"name\": name})\n}\n\nfunc (h *GovernanceHandler) DeleteChUser(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tcreds, err := h.getCredentials(r)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(chi.URLParam(r, \"name\"))\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\n\tifExists := true\n\tif raw := strings.ToLower(strings.TrimSpace(r.URL.Query().Get(\"if_exists\"))); raw == \"false\" || raw == \"0\" {\n\t\tifExists = false\n\t}\n\n\tsql := \"DROP USER \"\n\tif ifExists {\n\t\tsql += \"IF EXISTS \"\n\t}\n\tsql += escapeIdentifier(name)\n\n\tif err := h.executeClickHouseSQL(creds, sql); err != nil {\n\t\tslog.Error(\"Failed to delete ClickHouse user\", \"connection\", session.ConnectionID, \"name\", name, \"error\", err)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.access.user.deleted\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(name),\n\t})\n\n\th.triggerSyncAsync(*creds, governance.SyncAccess)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) ListChRoles(w http.ResponseWriter, r *http.Request) {\n\tconnID := 
h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\troles, err := h.Store.GetChRoles(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list CH roles\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list roles\"})\n\t\treturn\n\t}\n\tif roles == nil {\n\t\troles = []governance.ChRole{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"roles\": roles})\n}\n\nfunc (h *GovernanceHandler) GetAccessMatrix(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tuser := r.URL.Query().Get(\"user\")\n\tvar matrix []governance.AccessMatrixEntry\n\tvar err error\n\n\tif user != \"\" {\n\t\tmatrix, err = h.Store.GetAccessMatrixForUser(connID, user)\n\t} else {\n\t\tmatrix, err = h.Store.GetAccessMatrix(connID)\n\t}\n\tif err != nil {\n\t\tslog.Error(\"Failed to get access matrix\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get access matrix\"})\n\t\treturn\n\t}\n\tif matrix == nil {\n\t\tmatrix = []governance.AccessMatrixEntry{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"matrix\": matrix})\n}\n\nfunc (h *GovernanceHandler) GetOverPermissions(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tdays := queryIntBounded(r, \"days\", 30, 1, 3650)\n\tperms, err := h.Store.GetOverPermissionsWithDays(connID, days)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get over-permissions\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get 
over-permissions\"})\n\t\treturn\n\t}\n\tif perms == nil {\n\t\tperms = []governance.OverPermission{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"over_permissions\": perms})\n}\n\n// ── Policies ─────────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) ListPolicies(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tpolicies, err := h.Store.GetPolicies(connID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list policies\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list policies\"})\n\t\treturn\n\t}\n\tif policies == nil {\n\t\tpolicies = []governance.Policy{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"policies\": policies})\n}\n\nfunc (h *GovernanceHandler) CreatePolicy(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName            string `json:\"name\"`\n\t\tDescription     string `json:\"description\"`\n\t\tObjectType      string `json:\"object_type\"`\n\t\tObjectDatabase  string `json:\"object_database\"`\n\t\tObjectTable     string `json:\"object_table\"`\n\t\tObjectColumn    string `json:\"object_column\"`\n\t\tRequiredRole    string `json:\"required_role\"`\n\t\tSeverity        string `json:\"severity\"`\n\t\tEnforcementMode string `json:\"enforcement_mode\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tif strings.TrimSpace(body.Name) == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Policy 
name is required\"})\n\t\treturn\n\t}\n\tif body.ObjectType != \"database\" && body.ObjectType != \"table\" && body.ObjectType != \"column\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"object_type must be database, table, or column\"})\n\t\treturn\n\t}\n\tif body.Severity == \"\" {\n\t\tbody.Severity = \"warn\"\n\t}\n\tenforcementMode, err := normalizePolicyEnforcementMode(body.EnforcementMode)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tid, err := h.Store.CreatePolicy(\n\t\tsession.ConnectionID, body.Name, body.Description, body.ObjectType,\n\t\tbody.ObjectDatabase, body.ObjectTable, body.ObjectColumn,\n\t\tbody.RequiredRole, body.Severity, enforcementMode, session.ClickhouseUser,\n\t)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create policy\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create policy\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.policy.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(body.Name),\n\t})\n\n\tpolicy, _ := h.Store.GetPolicyByID(id)\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"policy\": policy})\n}\n\nfunc (h *GovernanceHandler) GetPolicy(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tpolicy, err := h.Store.GetPolicyByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get policy\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get policy\"})\n\t\treturn\n\t}\n\tif policy == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Policy not found\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"policy\": policy})\n}\n\nfunc (h *GovernanceHandler) UpdatePolicy(w 
http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\n\tvar body struct {\n\t\tName            string `json:\"name\"`\n\t\tDescription     string `json:\"description\"`\n\t\tRequiredRole    string `json:\"required_role\"`\n\t\tSeverity        string `json:\"severity\"`\n\t\tEnforcementMode string `json:\"enforcement_mode\"`\n\t\tEnabled         *bool  `json:\"enabled\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tenforcementMode, err := normalizePolicyEnforcementMode(body.EnforcementMode)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tenabled := true\n\tif body.Enabled != nil {\n\t\tenabled = *body.Enabled\n\t}\n\n\tif err := h.Store.UpdatePolicy(id, body.Name, body.Description, body.RequiredRole, body.Severity, enforcementMode, enabled); err != nil {\n\t\tslog.Error(\"Failed to update policy\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update policy\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.policy.updated\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(id),\n\t})\n\n\tpolicy, _ := h.Store.GetPolicyByID(id)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"policy\": policy})\n}\n\nfunc (h *GovernanceHandler) DeletePolicy(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid 
:= chi.URLParam(r, \"id\")\n\tif err := h.Store.DeletePolicy(id); err != nil {\n\t\tslog.Error(\"Failed to delete policy\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete policy\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.policy.deleted\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(id),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// ── Violations ───────────────────────────────────────────────────────────────\n\nfunc (h *GovernanceHandler) ListViolations(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tlimit := queryIntBounded(r, \"limit\", 50, 1, 500)\n\tpolicyID := r.URL.Query().Get(\"policy_id\")\n\n\tviolations, err := h.Store.GetViolations(connID, limit, policyID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list violations\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list violations\"})\n\t\treturn\n\t}\n\tif violations == nil {\n\t\tviolations = []governance.PolicyViolation{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"violations\": violations})\n}\n\nfunc (h *GovernanceHandler) CreateIncidentFromViolation(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tviolationID := strings.TrimSpace(chi.URLParam(r, \"id\"))\n\tif violationID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"violation id is required\"})\n\t\treturn\n\t}\n\n\tviolation, err := 
h.Store.GetViolationByID(violationID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to load violation\", \"id\", violationID, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load violation\"})\n\t\treturn\n\t}\n\tif violation == nil || violation.ConnectionID != session.ConnectionID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Violation not found\"})\n\t\treturn\n\t}\n\n\tincidentID, created, err := h.Store.UpsertIncidentFromViolation(\n\t\tsession.ConnectionID,\n\t\tviolation.ID,\n\t\tviolation.PolicyName,\n\t\tviolation.User,\n\t\tnormalizeIncidentSeverity(violation.Severity),\n\t\tviolation.ViolationDetail,\n\t)\n\tif err != nil {\n\t\tslog.Error(\"Failed to upsert incident from violation\", \"violation\", violation.ID, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create incident\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.incident.from_violation\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"violation=%s incident=%s created=%t\", violation.ID, incidentID, created)),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"incident_id\": incidentID, \"created\": created, \"success\": true})\n}\n\nfunc (h *GovernanceHandler) ListIncidents(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tstatus := strings.ToLower(strings.TrimSpace(r.URL.Query().Get(\"status\")))\n\tseverity := strings.ToLower(strings.TrimSpace(r.URL.Query().Get(\"severity\")))\n\tlimit := queryIntBounded(r, \"limit\", 100, 1, 1000)\n\n\tincidents, err := h.Store.ListIncidents(connID, status, severity, limit)\n\tif err != nil 
{\n\t\tslog.Error(\"Failed to list incidents\", \"connection\", connID, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list incidents\"})\n\t\treturn\n\t}\n\tif incidents == nil {\n\t\tincidents = []governance.Incident{}\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"incidents\": incidents})\n}\n\nfunc (h *GovernanceHandler) GetIncident(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tid := strings.TrimSpace(chi.URLParam(r, \"id\"))\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"incident id is required\"})\n\t\treturn\n\t}\n\tincident, err := h.Store.GetIncidentByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to load incident\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load incident\"})\n\t\treturn\n\t}\n\tif incident == nil || incident.ConnectionID != connID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Incident not found\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"incident\": incident})\n}\n\nfunc (h *GovernanceHandler) CreateIncident(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tSourceType string `json:\"source_type\"`\n\t\tSourceRef  string `json:\"source_ref\"`\n\t\tTitle      string `json:\"title\"`\n\t\tSeverity   string `json:\"severity\"`\n\t\tStatus     string `json:\"status\"`\n\t\tAssignee   string `json:\"assignee\"`\n\t\tDetails    string `json:\"details\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil 
{\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\ttitle := strings.TrimSpace(body.Title)\n\tif title == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"title is required\"})\n\t\treturn\n\t}\n\tseverity := normalizeIncidentSeverity(body.Severity)\n\tstatus := normalizeIncidentStatus(body.Status)\n\tsourceType := strings.TrimSpace(strings.ToLower(body.SourceType))\n\tif sourceType == \"\" {\n\t\tsourceType = \"manual\"\n\t}\n\tif sourceType != \"manual\" && sourceType != \"violation\" && sourceType != \"over_permission\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"source_type must be manual, violation, or over_permission\"})\n\t\treturn\n\t}\n\n\tid, err := h.Store.CreateIncident(\n\t\tsession.ConnectionID,\n\t\tsourceType,\n\t\tbody.SourceRef,\n\t\t\"\",\n\t\ttitle,\n\t\tseverity,\n\t\tstatus,\n\t\tbody.Assignee,\n\t\tbody.Details,\n\t\tsession.ClickhouseUser,\n\t)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create incident\", \"connection\", session.ConnectionID, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create incident\"})\n\t\treturn\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.incident.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(id),\n\t})\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": id, \"success\": true})\n}\n\nfunc (h *GovernanceHandler) UpdateIncident(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tid := strings.TrimSpace(chi.URLParam(r, \"id\"))\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": 
\"incident id is required\"})\n\t\treturn\n\t}\n\texisting, err := h.Store.GetIncidentByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load incident\"})\n\t\treturn\n\t}\n\tif existing == nil || existing.ConnectionID != session.ConnectionID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Incident not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tTitle          *string `json:\"title\"`\n\t\tSeverity       *string `json:\"severity\"`\n\t\tStatus         *string `json:\"status\"`\n\t\tAssignee       *string `json:\"assignee\"`\n\t\tDetails        *string `json:\"details\"`\n\t\tResolutionNote *string `json:\"resolution_note\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\ttitle := existing.Title\n\tif body.Title != nil {\n\t\ttitle = strings.TrimSpace(*body.Title)\n\t}\n\tif title == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"title is required\"})\n\t\treturn\n\t}\n\n\tseverity := existing.Severity\n\tif body.Severity != nil {\n\t\tseverity = normalizeIncidentSeverity(*body.Severity)\n\t}\n\n\tstatus := existing.Status\n\tif body.Status != nil {\n\t\tstatus = normalizeIncidentStatus(*body.Status)\n\t}\n\n\tassignee := derefString(existing.Assignee)\n\tif body.Assignee != nil {\n\t\tassignee = strings.TrimSpace(*body.Assignee)\n\t}\n\tdetails := derefString(existing.Details)\n\tif body.Details != nil {\n\t\tdetails = strings.TrimSpace(*body.Details)\n\t}\n\tresolution := derefString(existing.ResolutionNote)\n\tif body.ResolutionNote != nil {\n\t\tresolution = strings.TrimSpace(*body.ResolutionNote)\n\t}\n\n\tif err := h.Store.UpdateIncident(id, title, severity, status, assignee, details, resolution); err != nil {\n\t\tslog.Error(\"Failed to update incident\", \"id\", id, \"error\", 
err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update incident\"})\n\t\treturn\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.incident.updated\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(id),\n\t})\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) ListIncidentComments(w http.ResponseWriter, r *http.Request) {\n\tconnID := h.connectionID(r)\n\tif connID == \"\" {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tid := strings.TrimSpace(chi.URLParam(r, \"id\"))\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"incident id is required\"})\n\t\treturn\n\t}\n\tincident, err := h.Store.GetIncidentByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load incident\"})\n\t\treturn\n\t}\n\tif incident == nil || incident.ConnectionID != connID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Incident not found\"})\n\t\treturn\n\t}\n\tcomments, err := h.Store.ListIncidentComments(id, 500)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list incident comments\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list incident comments\"})\n\t\treturn\n\t}\n\tif comments == nil {\n\t\tcomments = []governance.IncidentComment{}\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"comments\": comments})\n}\n\nfunc (h *GovernanceHandler) CreateIncidentComment(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\tid := 
strings.TrimSpace(chi.URLParam(r, \"id\"))\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"incident id is required\"})\n\t\treturn\n\t}\n\tincident, err := h.Store.GetIncidentByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load incident\"})\n\t\treturn\n\t}\n\tif incident == nil || incident.ConnectionID != session.ConnectionID {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Incident not found\"})\n\t\treturn\n\t}\n\tvar body struct {\n\t\tCommentText string `json:\"comment_text\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\tcomment := strings.TrimSpace(body.CommentText)\n\tif comment == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"comment_text is required\"})\n\t\treturn\n\t}\n\tif len(comment) > 4000 {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"comment_text must be <= 4000 characters\"})\n\t\treturn\n\t}\n\tcommentID, err := h.Store.CreateIncidentComment(id, comment, session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create incident comment\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create incident comment\"})\n\t\treturn\n\t}\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"governance.incident.comment.created\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(id),\n\t})\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": commentID, \"success\": true})\n}\n\nfunc normalizeIncidentSeverity(v string) string {\n\tswitch strings.ToLower(strings.TrimSpace(v)) {\n\tcase \"info\", \"warn\", \"error\", 
\"critical\":\n\t\treturn strings.ToLower(strings.TrimSpace(v))\n\tdefault:\n\t\treturn \"warn\"\n\t}\n}\n\nfunc normalizePolicyEnforcementMode(v string) (string, error) {\n\tmode := strings.ToLower(strings.TrimSpace(v))\n\tswitch mode {\n\tcase \"\", \"warn\":\n\t\treturn \"warn\", nil\n\tcase \"block\":\n\t\treturn \"block\", nil\n\tdefault:\n\t\treturn \"\", fmt.Errorf(\"enforcement_mode must be warn or block\")\n\t}\n}\n\nfunc normalizeIncidentStatus(v string) string {\n\tswitch strings.ToLower(strings.TrimSpace(v)) {\n\tcase \"open\", \"triaged\", \"in_progress\", \"resolved\", \"dismissed\":\n\t\treturn strings.ToLower(strings.TrimSpace(v))\n\tdefault:\n\t\treturn \"open\"\n\t}\n}\n\nfunc derefString(v *string) string {\n\tif v == nil {\n\t\treturn \"\"\n\t}\n\treturn *v\n}\n"
  },
  {
    "path": "internal/server/handlers/governance_alerts.go",
    "content": "package handlers\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"regexp\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/alerts\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/go-chi/chi/v5\"\n)\n\nvar emailRegex = regexp.MustCompile(`^[^@\\s]+@[^@\\s]+\\.[^@\\s]+$`)\n\ntype alertRuleRoutePayload struct {\n\tChannelID               string   `json:\"channel_id\"`\n\tRecipients              []string `json:\"recipients\"`\n\tIsActive                *bool    `json:\"is_active\"`\n\tDeliveryMode            string   `json:\"delivery_mode\"`\n\tDigestWindowMinutes     *int     `json:\"digest_window_minutes\"`\n\tEscalationChannelID     *string  `json:\"escalation_channel_id\"`\n\tEscalationRecipients    []string `json:\"escalation_recipients\"`\n\tEscalationAfterFailures *int     `json:\"escalation_after_failures\"`\n}\n\ntype alertRuleResponse struct {\n\tdatabase.AlertRule\n\tRoutes []database.AlertRuleRouteView `json:\"routes\"`\n}\n\nfunc (h *GovernanceHandler) ListAlertChannels(w http.ResponseWriter, r *http.Request) {\n\tchannels, err := h.DB.ListAlertChannels()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list alert channels\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list alert channels\"})\n\t\treturn\n\t}\n\n\ttype responseItem struct {\n\t\tdatabase.AlertChannel\n\t\tConfig    map[string]interface{} `json:\"config\"`\n\t\tHasSecret bool                   `json:\"has_secret\"`\n\t}\n\n\tout := make([]responseItem, 0, len(channels))\n\tfor _, channel := range channels {\n\t\tdecrypted, err := crypto.Decrypt(channel.ConfigEncrypted, h.Config.AppSecretKey)\n\t\tif err != nil {\n\t\t\tslog.Warn(\"Failed to decrypt alert channel config\", \"channel\", channel.ID, 
\"error\", err)\n\t\t\tcontinue\n\t\t}\n\t\tcfg := map[string]interface{}{}\n\t\tif err := json.Unmarshal([]byte(decrypted), &cfg); err != nil {\n\t\t\tslog.Warn(\"Failed to parse alert channel config\", \"channel\", channel.ID, \"error\", err)\n\t\t\tcontinue\n\t\t}\n\t\tsanitized, hasSecret := sanitizeChannelConfig(channel.ChannelType, cfg)\n\t\tout = append(out, responseItem{\n\t\t\tAlertChannel: channel,\n\t\t\tConfig:       sanitized,\n\t\t\tHasSecret:    hasSecret,\n\t\t})\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"channels\": out})\n}\n\nfunc (h *GovernanceHandler) CreateAlertChannel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName        string                 `json:\"name\"`\n\t\tChannelType string                 `json:\"channel_type\"`\n\t\tConfig      map[string]interface{} `json:\"config\"`\n\t\tIsActive    *bool                  `json:\"is_active\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tchannelType := strings.ToLower(strings.TrimSpace(body.ChannelType))\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\tif !isSupportedChannelType(channelType) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"channel_type must be smtp, resend, or brevo\"})\n\t\treturn\n\t}\n\tif err := validateChannelConfig(channelType, body.Config, false); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\trawConfig, _ := json.Marshal(body.Config)\n\tencrypted, err := crypto.Encrypt(string(rawConfig), 
h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to encrypt alert channel config\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to store alert channel config\"})\n\t\treturn\n\t}\n\n\tisActive := true\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\n\tid, err := h.DB.CreateAlertChannel(name, channelType, encrypted, isActive, session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create alert channel\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create alert channel\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"alerts.channel.created\",\n\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\tDetails:   strPtr(fmt.Sprintf(\"%s (%s)\", name, channelType)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": id, \"success\": true})\n}\n\nfunc (h *GovernanceHandler) UpdateAlertChannel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tchannel, err := h.DB.GetAlertChannelByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to load alert channel\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load alert channel\"})\n\t\treturn\n\t}\n\tif channel == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Alert channel not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName        *string                `json:\"name\"`\n\t\tChannelType *string                `json:\"channel_type\"`\n\t\tConfig      map[string]interface{} `json:\"config\"`\n\t\tIsActive    *bool                  
`json:\"is_active\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := channel.Name\n\tif body.Name != nil {\n\t\tname = strings.TrimSpace(*body.Name)\n\t}\n\tchannelType := channel.ChannelType\n\tif body.ChannelType != nil {\n\t\tchannelType = strings.ToLower(strings.TrimSpace(*body.ChannelType))\n\t}\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\tif !isSupportedChannelType(channelType) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"channel_type must be smtp, resend, or brevo\"})\n\t\treturn\n\t}\n\n\tisActive := channel.IsActive\n\tif body.IsActive != nil {\n\t\tisActive = *body.IsActive\n\t}\n\n\tvar encryptedConfig *string\n\tif body.Config != nil {\n\t\tif err := validateChannelConfig(channelType, body.Config, true); err != nil {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\t\treturn\n\t\t}\n\t\trawConfig, _ := json.Marshal(body.Config)\n\t\tenc, err := crypto.Encrypt(string(rawConfig), h.Config.AppSecretKey)\n\t\tif err != nil {\n\t\t\tslog.Error(\"Failed to encrypt alert channel config\", \"id\", id, \"error\", err)\n\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to store alert channel config\"})\n\t\t\treturn\n\t\t}\n\t\tencryptedConfig = &enc\n\t}\n\n\tif err := h.DB.UpdateAlertChannel(id, name, channelType, encryptedConfig, isActive); err != nil {\n\t\tslog.Error(\"Failed to update alert channel\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update alert channel\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"alerts.channel.updated\",\n\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\tDetails:   
strPtr(fmt.Sprintf(\"%s (%s)\", name, channelType)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) DeleteAlertChannel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tchannel, err := h.DB.GetAlertChannelByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load alert channel\"})\n\t\treturn\n\t}\n\tif channel == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Alert channel not found\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteAlertChannel(id); err != nil {\n\t\tslog.Error(\"Failed to delete alert channel\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete alert channel\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"alerts.channel.deleted\",\n\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\tDetails:   strPtr(fmt.Sprintf(\"%s (%s)\", channel.Name, channel.ChannelType)),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) TestAlertChannel(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tchannel, err := h.DB.GetAlertChannelByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load alert channel\"})\n\t\treturn\n\t}\n\tif channel == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Alert channel not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tRecipients []string `json:\"recipients\"`\n\t\tSubject    string   
`json:\"subject\"`\n\t\tMessage    string   `json:\"message\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\trecipients, err := validateRecipients(body.Recipients)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tdecrypted, err := crypto.Decrypt(channel.ConfigEncrypted, h.Config.AppSecretKey)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt alert channel config\"})\n\t\treturn\n\t}\n\tcfg := map[string]interface{}{}\n\tif err := json.Unmarshal([]byte(decrypted), &cfg); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to parse alert channel config\"})\n\t\treturn\n\t}\n\n\tsubject := strings.TrimSpace(body.Subject)\n\tif subject == \"\" {\n\t\tsubject = \"CH-UI Alert Channel Test\"\n\t}\n\tmessage := strings.TrimSpace(body.Message)\n\tif message == \"\" {\n\t\tmessage = \"This is a test notification from CH-UI.\"\n\t}\n\n\tmsgID, err := alerts.SendDirect(context.Background(), channel.ChannelType, cfg, recipients, subject, message)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true, \"provider_message_id\": msgID})\n}\n\nfunc (h *GovernanceHandler) ListAlertRules(w http.ResponseWriter, r *http.Request) {\n\trules, err := h.DB.ListAlertRules()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list alert rules\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list alert rules\"})\n\t\treturn\n\t}\n\n\tout := make([]alertRuleResponse, 0, len(rules))\n\tfor _, rule := range rules {\n\t\troutes, err := h.DB.ListAlertRuleRoutes(rule.ID)\n\t\tif 
err != nil {\n\t\t\tslog.Warn(\"Failed to load alert rule routes\", \"rule\", rule.ID, \"error\", err)\n\t\t\troutes = []database.AlertRuleRouteView{}\n\t\t}\n\t\tout = append(out, alertRuleResponse{\n\t\t\tAlertRule: rule,\n\t\t\tRoutes:    routes,\n\t\t})\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"rules\": out})\n}\n\nfunc (h *GovernanceHandler) CreateAlertRule(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName            string                  `json:\"name\"`\n\t\tEventType       string                  `json:\"event_type\"`\n\t\tSeverityMin     string                  `json:\"severity_min\"`\n\t\tEnabled         *bool                   `json:\"enabled\"`\n\t\tCooldownSeconds *int                    `json:\"cooldown_seconds\"`\n\t\tMaxAttempts     *int                    `json:\"max_attempts\"`\n\t\tSubjectTemplate string                  `json:\"subject_template\"`\n\t\tBodyTemplate    string                  `json:\"body_template\"`\n\t\tRoutes          []alertRuleRoutePayload `json:\"routes\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\teventType := strings.ToLower(strings.TrimSpace(body.EventType))\n\tseverityMin := strings.ToLower(strings.TrimSpace(body.SeverityMin))\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\tif !isSupportedEventType(eventType) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"event_type must be policy.violation, schedule.failed, schedule.slow, or *\"})\n\t\treturn\n\t}\n\tif !isSupportedSeverity(severityMin) {\n\t\twriteJSON(w, 
http.StatusBadRequest, map[string]string{\"error\": \"severity_min must be info, warn, error, or critical\"})\n\t\treturn\n\t}\n\n\tenabled := true\n\tif body.Enabled != nil {\n\t\tenabled = *body.Enabled\n\t}\n\tcooldownSeconds := 300\n\tif body.CooldownSeconds != nil {\n\t\tcooldownSeconds = *body.CooldownSeconds\n\t}\n\tmaxAttempts := 5\n\tif body.MaxAttempts != nil {\n\t\tmaxAttempts = *body.MaxAttempts\n\t}\n\n\troutes, err := h.validateRuleRoutes(body.Routes)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tid, err := h.DB.CreateAlertRule(name, eventType, severityMin, enabled, cooldownSeconds, maxAttempts, body.SubjectTemplate, body.BodyTemplate, session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create alert rule\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create alert rule\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.ReplaceAlertRuleRoutes(id, routes); err != nil {\n\t\t_ = h.DB.DeleteAlertRule(id)\n\t\tslog.Error(\"Failed to create alert rule routes\", \"rule\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create alert rule routes\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"alerts.rule.created\",\n\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\tDetails:   strPtr(name),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"id\": id, \"success\": true})\n}\n\nfunc (h *GovernanceHandler) UpdateAlertRule(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\texisting, err := h.DB.GetAlertRuleByID(id)\n\tif err != nil {\n\t\twriteJSON(w, 
http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load alert rule\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Alert rule not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName            *string                  `json:\"name\"`\n\t\tEventType       *string                  `json:\"event_type\"`\n\t\tSeverityMin     *string                  `json:\"severity_min\"`\n\t\tEnabled         *bool                    `json:\"enabled\"`\n\t\tCooldownSeconds *int                     `json:\"cooldown_seconds\"`\n\t\tMaxAttempts     *int                     `json:\"max_attempts\"`\n\t\tSubjectTemplate *string                  `json:\"subject_template\"`\n\t\tBodyTemplate    *string                  `json:\"body_template\"`\n\t\tRoutes          *[]alertRuleRoutePayload `json:\"routes\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tif body.Name != nil {\n\t\tname = strings.TrimSpace(*body.Name)\n\t}\n\teventType := existing.EventType\n\tif body.EventType != nil {\n\t\teventType = strings.ToLower(strings.TrimSpace(*body.EventType))\n\t}\n\tseverityMin := existing.SeverityMin\n\tif body.SeverityMin != nil {\n\t\tseverityMin = strings.ToLower(strings.TrimSpace(*body.SeverityMin))\n\t}\n\tenabled := existing.Enabled\n\tif body.Enabled != nil {\n\t\tenabled = *body.Enabled\n\t}\n\tcooldownSeconds := existing.CooldownSeconds\n\tif body.CooldownSeconds != nil {\n\t\tcooldownSeconds = *body.CooldownSeconds\n\t}\n\tmaxAttempts := existing.MaxAttempts\n\tif body.MaxAttempts != nil {\n\t\tmaxAttempts = *body.MaxAttempts\n\t}\n\tsubjectTemplate := coalesceStringPtr(body.SubjectTemplate, existing.SubjectTemplate)\n\tbodyTemplate := coalesceStringPtr(body.BodyTemplate, existing.BodyTemplate)\n\n\tif name == \"\" 
{\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"name is required\"})\n\t\treturn\n\t}\n\tif !isSupportedEventType(eventType) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"event_type must be policy.violation, schedule.failed, schedule.slow, or *\"})\n\t\treturn\n\t}\n\tif !isSupportedSeverity(severityMin) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"severity_min must be info, warn, error, or critical\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdateAlertRule(id, name, eventType, severityMin, enabled, cooldownSeconds, maxAttempts, subjectTemplate, bodyTemplate); err != nil {\n\t\tslog.Error(\"Failed to update alert rule\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update alert rule\"})\n\t\treturn\n\t}\n\n\tif body.Routes != nil {\n\t\troutes, err := h.validateRuleRoutes(*body.Routes)\n\t\tif err != nil {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\t\treturn\n\t\t}\n\t\tif err := h.DB.ReplaceAlertRuleRoutes(id, routes); err != nil {\n\t\t\tslog.Error(\"Failed to replace alert rule routes\", \"rule\", id, \"error\", err)\n\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update alert rule routes\"})\n\t\t\treturn\n\t\t}\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"alerts.rule.updated\",\n\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\tDetails:   strPtr(name),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) DeleteAlertRule(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\texisting, err := 
h.DB.GetAlertRuleByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load alert rule\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Alert rule not found\"})\n\t\treturn\n\t}\n\tif err := h.DB.DeleteAlertRule(id); err != nil {\n\t\tslog.Error(\"Failed to delete alert rule\", \"id\", id, \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete alert rule\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:    \"alerts.rule.deleted\",\n\t\tUsername:  strPtr(session.ClickhouseUser),\n\t\tDetails:   strPtr(existing.Name),\n\t\tIPAddress: strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\nfunc (h *GovernanceHandler) ListAlertEvents(w http.ResponseWriter, r *http.Request) {\n\tlimit := 100\n\tif raw := strings.TrimSpace(r.URL.Query().Get(\"limit\")); raw != \"\" {\n\t\tif n, err := strconv.Atoi(raw); err == nil {\n\t\t\tlimit = n\n\t\t}\n\t}\n\teventType := strings.TrimSpace(r.URL.Query().Get(\"event_type\"))\n\tstatus := strings.TrimSpace(r.URL.Query().Get(\"status\"))\n\tevents, err := h.DB.ListAlertEvents(limit, eventType, status)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list alert events\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list alert events\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"events\": events})\n}\n\nfunc (h *GovernanceHandler) validateRuleRoutes(payload []alertRuleRoutePayload) ([]database.AlertRuleRoute, error) {\n\troutes := make([]database.AlertRuleRoute, 0, len(payload))\n\tfor _, item := range payload {\n\t\tchannelID := strings.TrimSpace(item.ChannelID)\n\t\tif channelID == \"\" {\n\t\t\treturn nil, fmt.Errorf(\"route channel_id is required\")\n\t\t}\n\t\tchannel, 
err := h.DB.GetAlertChannelByID(channelID)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to load channel %s\", channelID)\n\t\t}\n\t\tif channel == nil {\n\t\t\treturn nil, fmt.Errorf(\"channel %s not found\", channelID)\n\t\t}\n\t\trecipients, err := validateRecipients(item.Recipients)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"route channel %s: %w\", channelID, err)\n\t\t}\n\t\tactive := true\n\t\tif item.IsActive != nil {\n\t\t\tactive = *item.IsActive\n\t\t}\n\t\tdeliveryMode := strings.ToLower(strings.TrimSpace(item.DeliveryMode))\n\t\tif deliveryMode == \"\" {\n\t\t\tdeliveryMode = \"immediate\"\n\t\t}\n\t\tif deliveryMode != \"immediate\" && deliveryMode != \"digest\" {\n\t\t\treturn nil, fmt.Errorf(\"route channel %s: delivery_mode must be immediate or digest\", channelID)\n\t\t}\n\t\tdigestWindow := 0\n\t\tif item.DigestWindowMinutes != nil {\n\t\t\tdigestWindow = *item.DigestWindowMinutes\n\t\t}\n\t\tif digestWindow < 0 || digestWindow > 1440 {\n\t\t\treturn nil, fmt.Errorf(\"route channel %s: digest_window_minutes must be between 0 and 1440\", channelID)\n\t\t}\n\t\tif deliveryMode == \"digest\" && digestWindow == 0 {\n\t\t\tdigestWindow = 15\n\t\t}\n\n\t\tvar escalationChannelID *string\n\t\tif item.EscalationChannelID != nil && strings.TrimSpace(*item.EscalationChannelID) != \"\" {\n\t\t\tescID := strings.TrimSpace(*item.EscalationChannelID)\n\t\t\tescalationChannel, err := h.DB.GetAlertChannelByID(escID)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"route channel %s: failed to load escalation channel %s\", channelID, escID)\n\t\t\t}\n\t\t\tif escalationChannel == nil {\n\t\t\t\treturn nil, fmt.Errorf(\"route channel %s: escalation channel %s not found\", channelID, escID)\n\t\t\t}\n\t\t\tescalationChannelID = &escID\n\t\t}\n\t\tescalationRecipients := []string{}\n\t\tif len(item.EscalationRecipients) > 0 {\n\t\t\tescalationRecipients, err = validateRecipients(item.EscalationRecipients)\n\t\t\tif err != nil 
{\n\t\t\t\treturn nil, fmt.Errorf(\"route channel %s escalation_recipients: %w\", channelID, err)\n\t\t\t}\n\t\t}\n\t\tescalationAfterFailures := 0\n\t\tif item.EscalationAfterFailures != nil {\n\t\t\tescalationAfterFailures = *item.EscalationAfterFailures\n\t\t}\n\t\tif escalationAfterFailures < 0 || escalationAfterFailures > 10 {\n\t\t\treturn nil, fmt.Errorf(\"route channel %s: escalation_after_failures must be between 0 and 10\", channelID)\n\t\t}\n\t\troutes = append(routes, database.AlertRuleRoute{\n\t\t\tChannelID:               channelID,\n\t\t\tRecipients:              recipients,\n\t\t\tIsActive:                active,\n\t\t\tDeliveryMode:            deliveryMode,\n\t\t\tDigestWindowMinutes:     digestWindow,\n\t\t\tEscalationChannelID:     escalationChannelID,\n\t\t\tEscalationRecipients:    escalationRecipients,\n\t\t\tEscalationAfterFailures: escalationAfterFailures,\n\t\t})\n\t}\n\treturn routes, nil\n}\n\nfunc sanitizeChannelConfig(channelType string, cfg map[string]interface{}) (map[string]interface{}, bool) {\n\tout := make(map[string]interface{}, len(cfg))\n\tfor k, v := range cfg {\n\t\tout[k] = v\n\t}\n\thasSecret := false\n\tswitch strings.ToLower(strings.TrimSpace(channelType)) {\n\tcase alerts.ChannelTypeSMTP:\n\t\tif _, ok := out[\"password\"]; ok {\n\t\t\thasSecret = strings.TrimSpace(fmt.Sprintf(\"%v\", out[\"password\"])) != \"\"\n\t\t\tout[\"password\"] = \"\"\n\t\t}\n\tcase alerts.ChannelTypeResend, alerts.ChannelTypeBrevo:\n\t\tif _, ok := out[\"api_key\"]; ok {\n\t\t\thasSecret = strings.TrimSpace(fmt.Sprintf(\"%v\", out[\"api_key\"])) != \"\"\n\t\t\tout[\"api_key\"] = \"\"\n\t\t}\n\t}\n\treturn out, hasSecret\n}\n\nfunc validateChannelConfig(channelType string, cfg map[string]interface{}, allowEmptySecret bool) error {\n\tif cfg == nil {\n\t\treturn fmt.Errorf(\"config is required\")\n\t}\n\tget := func(key string) string {\n\t\traw := strings.TrimSpace(fmt.Sprintf(\"%v\", cfg[key]))\n\t\tif raw == \"<nil>\" {\n\t\t\treturn 
\"\"\n\t\t}\n\t\treturn raw\n\t}\n\n\tswitch strings.ToLower(strings.TrimSpace(channelType)) {\n\tcase alerts.ChannelTypeSMTP:\n\t\tif get(\"host\") == \"\" {\n\t\t\treturn fmt.Errorf(\"smtp config requires host\")\n\t\t}\n\t\tif get(\"from_email\") == \"\" {\n\t\t\treturn fmt.Errorf(\"smtp config requires from_email\")\n\t\t}\n\t\tif !allowEmptySecret && get(\"username\") != \"\" && get(\"password\") == \"\" {\n\t\t\treturn fmt.Errorf(\"smtp config requires password when username is set\")\n\t\t}\n\tcase alerts.ChannelTypeResend, alerts.ChannelTypeBrevo:\n\t\tif get(\"from_email\") == \"\" {\n\t\t\treturn fmt.Errorf(\"%s config requires from_email\", channelType)\n\t\t}\n\t\tif !allowEmptySecret && get(\"api_key\") == \"\" {\n\t\t\treturn fmt.Errorf(\"%s config requires api_key\", channelType)\n\t\t}\n\tdefault:\n\t\treturn fmt.Errorf(\"unsupported channel type: %s\", channelType)\n\t}\n\treturn nil\n}\n\nfunc validateRecipients(values []string) ([]string, error) {\n\tif len(values) == 0 {\n\t\treturn nil, fmt.Errorf(\"at least one recipient is required\")\n\t}\n\tout := make([]string, 0, len(values))\n\tfor _, raw := range values {\n\t\temail := strings.TrimSpace(strings.ToLower(raw))\n\t\tif email == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif !emailRegex.MatchString(email) {\n\t\t\treturn nil, fmt.Errorf(\"invalid recipient email: %s\", raw)\n\t\t}\n\t\tout = append(out, email)\n\t}\n\tif len(out) == 0 {\n\t\treturn nil, fmt.Errorf(\"at least one valid recipient is required\")\n\t}\n\treturn out, nil\n}\n\nfunc isSupportedChannelType(v string) bool {\n\tswitch strings.ToLower(strings.TrimSpace(v)) {\n\tcase alerts.ChannelTypeSMTP, alerts.ChannelTypeResend, alerts.ChannelTypeBrevo:\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc isSupportedEventType(v string) bool {\n\tswitch strings.ToLower(strings.TrimSpace(v)) {\n\tcase \"*\", \"any\", alerts.EventTypePolicyViolation, alerts.EventTypeScheduleFailed, alerts.EventTypeScheduleSlow:\n\t\treturn 
true\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc isSupportedSeverity(v string) bool {\n\tswitch strings.ToLower(strings.TrimSpace(v)) {\n\tcase alerts.SeverityInfo, alerts.SeverityWarn, alerts.SeverityError, alerts.SeverityCritical:\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc coalesceStringPtr(v *string, fallback *string) string {\n\tif v != nil {\n\t\treturn strings.TrimSpace(*v)\n\t}\n\tif fallback == nil {\n\t\treturn \"\"\n\t}\n\treturn strings.TrimSpace(*fallback)\n}\n"
  },
  {
    "path": "internal/server/handlers/governance_auditlog.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\n// ---------- GET /audit-logs ----------\n\nfunc (h *GovernanceHandler) GetAuditLogs(w http.ResponseWriter, r *http.Request) {\n\tlimit := 100\n\tif l := r.URL.Query().Get(\"limit\"); l != \"\" {\n\t\tif parsed, err := strconv.Atoi(l); err == nil && parsed > 0 {\n\t\t\tlimit = parsed\n\t\t}\n\t}\n\tif limit > 1000 {\n\t\tlimit = 1000\n\t}\n\n\ttimeRange := strings.TrimSpace(r.URL.Query().Get(\"timeRange\"))\n\taction := strings.TrimSpace(r.URL.Query().Get(\"action\"))\n\tusername := strings.TrimSpace(r.URL.Query().Get(\"username\"))\n\tsearch := strings.TrimSpace(r.URL.Query().Get(\"search\"))\n\n\tlogs, err := h.DB.GetAuditLogsFiltered(limit, timeRange, action, username, search)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get audit logs\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to retrieve audit logs\"})\n\t\treturn\n\t}\n\n\tif logs == nil {\n\t\tlogs = []database.AuditLog{}\n\t}\n\n\ttype auditLogResponse struct {\n\t\tdatabase.AuditLog\n\t\tParsedDetails interface{} `json:\"parsed_details,omitempty\"`\n\t}\n\n\tresults := make([]auditLogResponse, 0, len(logs))\n\tfor _, log := range logs {\n\t\tentry := auditLogResponse{AuditLog: log}\n\t\tif log.Details != nil && *log.Details != \"\" {\n\t\t\tvar parsed interface{}\n\t\t\tif err := json.Unmarshal([]byte(*log.Details), &parsed); err == nil {\n\t\t\t\tentry.ParsedDetails = parsed\n\t\t\t}\n\t\t}\n\t\tresults = append(results, entry)\n\t}\n\n\twriteJSON(w, http.StatusOK, results)\n}\n"
  },
  {
    "path": "internal/server/handlers/governance_querylog.go",
    "content": "package handlers\n\nimport (\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n)\n\n// ---------- GET /clickhouse-query-log ----------\n\nvar timeRangeDurations = map[string]string{\n\t\"5m\": \"5 MINUTE\", \"15m\": \"15 MINUTE\", \"30m\": \"30 MINUTE\",\n\t\"1h\": \"1 HOUR\", \"6h\": \"6 HOUR\", \"12h\": \"12 HOUR\",\n\t\"24h\": \"24 HOUR\", \"3d\": \"3 DAY\", \"7d\": \"7 DAY\",\n}\n\nfunc (h *GovernanceHandler) GetClickHouseQueryLog(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tif !h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\"error\": \"Tunnel is offline\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\ttimeRange := r.URL.Query().Get(\"timeRange\")\n\tlimitStr := r.URL.Query().Get(\"limit\")\n\toffsetStr := r.URL.Query().Get(\"offset\")\n\tsearch := r.URL.Query().Get(\"search\")\n\tqueryKind := r.URL.Query().Get(\"queryKind\")\n\tstatus := r.URL.Query().Get(\"status\")\n\n\tlimit := 100\n\tif limitStr != \"\" {\n\t\tif parsed, err := strconv.Atoi(limitStr); err == nil && parsed > 0 {\n\t\t\tlimit = parsed\n\t\t}\n\t}\n\tif limit > 1000 {\n\t\tlimit = 1000\n\t}\n\n\toffset := 0\n\tif offsetStr != \"\" {\n\t\tif parsed, err := strconv.Atoi(offsetStr); err == nil && parsed >= 0 {\n\t\t\toffset = parsed\n\t\t}\n\t}\n\n\tvar prewhereConditions []string\n\tvar whereConditions 
[]string\n\n\tif timeRange != \"\" {\n\t\tif duration, ok := timeRangeDurations[timeRange]; ok {\n\t\t\tprewhereConditions = append(prewhereConditions,\n\t\t\t\tfmt.Sprintf(\"event_time >= now() - INTERVAL %s\", duration))\n\t\t}\n\t}\n\n\tif search != \"\" {\n\t\tescaped := escapeString(search)\n\t\twhereConditions = append(whereConditions,\n\t\t\tfmt.Sprintf(\"(query ILIKE '%%%s%%' OR user ILIKE '%%%s%%')\", escaped, escaped))\n\t}\n\n\tif queryKind != \"\" {\n\t\tnormalized := strings.ToLower(strings.TrimSpace(queryKind))\n\t\tif normalized != \"\" && normalized != \"all\" {\n\t\t\tescaped := escapeString(normalized)\n\t\t\twhereConditions = append(whereConditions,\n\t\t\t\tfmt.Sprintf(\"lowerUTF8(query_kind) = '%s'\", escaped))\n\t\t}\n\t}\n\n\tif status != \"\" {\n\t\tswitch status {\n\t\tcase \"success\":\n\t\t\twhereConditions = append(whereConditions, \"exception_code = 0\")\n\t\tcase \"error\":\n\t\t\twhereConditions = append(whereConditions, \"exception_code != 0\")\n\t\t}\n\t}\n\n\tsql := `SELECT\n\t\ttype, event_time, query_start_time, query_duration_ms,\n\t\tread_rows, read_bytes, written_rows, written_bytes,\n\t\tresult_rows, result_bytes, memory_usage,\n\t\tquery, query_kind, user, exception_code, exception,\n\t\tis_initial_query, databases, tables\n\tFROM system.query_log`\n\n\tif len(prewhereConditions) > 0 {\n\t\tsql += \"\\nPREWHERE \" + strings.Join(prewhereConditions, \" AND \")\n\t}\n\tif len(whereConditions) > 0 {\n\t\tsql += \"\\nWHERE \" + strings.Join(whereConditions, \" AND \")\n\t}\n\tsql += \"\\nORDER BY event_time DESC\"\n\tsql += fmt.Sprintf(\"\\nLIMIT %d OFFSET %d\", limit, offset)\n\tsql += \"\\nFORMAT JSON\"\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID, sql,\n\t\tsession.ClickhouseUser, password,\n\t\t60*time.Second,\n\t)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to query system.query_log\", \"error\", err, \"connection\", session.ConnectionID)\n\n\t\tif shouldFallbackToQueryThreadLog(err) 
{\n\t\t\tfallbackSQL := strings.Replace(sql, \"system.query_log\", \"system.query_thread_log\", 1)\n\t\t\tresult, err = h.Gateway.ExecuteQuery(\n\t\t\t\tsession.ConnectionID, fallbackSQL,\n\t\t\t\tsession.ClickhouseUser, password,\n\t\t\t\t60*time.Second,\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\tslog.Warn(\"Fallback to query_thread_log also failed\", \"error\", err)\n\t\t\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\t\treturn\n\t\t}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"data\": result.Data,\n\t\t\"meta\": result.Meta,\n\t})\n}\n\nfunc shouldFallbackToQueryThreadLog(err error) bool {\n\tif err == nil {\n\t\treturn false\n\t}\n\tmsg := strings.ToLower(err.Error())\n\tif strings.Contains(msg, \"system.query_log\") && strings.Contains(msg, \"unknown_table\") {\n\t\treturn true\n\t}\n\tif strings.Contains(msg, \"unknown table expression identifier\") && strings.Contains(msg, \"system.query_log\") {\n\t\treturn true\n\t}\n\treturn false\n}\n"
  },
  {
    "path": "internal/server/handlers/health.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"net/http\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n)\n\ntype HealthHandler struct{}\n\nfunc (h *HealthHandler) Health(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]string{\n\t\t\"status\":    \"ok\",\n\t\t\"service\":   \"ch-ui\",\n\t\t\"version\":   version.Version,\n\t\t\"timestamp\": time.Now().UTC().Format(time.RFC3339),\n\t})\n}\n"
  },
  {
    "path": "internal/server/handlers/license.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"net/http\"\n\t\"strings\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/license\"\n)\n\n// LicenseHandler handles license status and activation endpoints.\ntype LicenseHandler struct {\n\tDB     *database.DB\n\tConfig *config.Config\n}\n\n// GetLicense returns the current license status.\n// GET /api/license\nfunc (h *LicenseHandler) GetLicense(w http.ResponseWriter, r *http.Request) {\n\tinfo := license.ValidateLicense(h.Config.LicenseJSON)\n\twriteJSON(w, http.StatusOK, info)\n}\n\n// ActivateLicense activates a new license by validating and storing the signed JSON.\n// POST /api/license/activate\nfunc (h *LicenseHandler) ActivateLicense(w http.ResponseWriter, r *http.Request) {\n\tvar body struct {\n\t\tLicense string `json:\"license\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tlicenseJSON := strings.TrimSpace(body.License)\n\tif licenseJSON == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"License JSON is required\"})\n\t\treturn\n\t}\n\n\tinfo := license.ValidateLicense(licenseJSON)\n\tif !info.Valid {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid or expired license\"})\n\t\treturn\n\t}\n\n\t// Store in settings\n\tif err := h.DB.SetSetting(\"license_json\", licenseJSON); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to save license\"})\n\t\treturn\n\t}\n\n\t// Update runtime config\n\th.Config.LicenseJSON = licenseJSON\n\n\twriteJSON(w, http.StatusOK, info)\n}\n\n// DeactivateLicense removes the current license (downgrade to community).\n// POST /api/license/deactivate\nfunc (h *LicenseHandler) DeactivateLicense(w 
http.ResponseWriter, r *http.Request) {\n\tif err := h.DB.DeleteSetting(\"license_json\"); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to remove license\"})\n\t\treturn\n\t}\n\n\th.Config.LicenseJSON = \"\"\n\n\twriteJSON(w, http.StatusOK, license.CommunityLicense())\n}\n"
  },
  {
    "path": "internal/server/handlers/models.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"time\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/models\"\n\t\"github.com/caioricciuti/ch-ui/internal/scheduler\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// ModelsHandler handles model CRUD and execution.\ntype ModelsHandler struct {\n\tDB      *database.DB\n\tGateway *tunnel.Gateway\n\tConfig  *config.Config\n\tRunner  *models.Runner\n}\n\n// Routes returns a chi.Router with all model routes.\nfunc (h *ModelsHandler) Routes() chi.Router {\n\tr := chi.NewRouter()\n\n\tr.Get(\"/\", h.ListModels)\n\tr.Post(\"/\", h.CreateModel)\n\tr.Get(\"/dag\", h.GetDAG)\n\tr.Get(\"/validate\", h.ValidateAll)\n\tr.Post(\"/run\", h.RunAll)\n\tr.Get(\"/runs\", h.ListRuns)\n\tr.Get(\"/runs/{runId}\", h.GetRun)\n\tr.Get(\"/pipelines\", h.ListPipelines)\n\tr.Post(\"/pipelines/{anchorId}/run\", h.RunPipeline)\n\tr.Get(\"/schedules\", h.ListSchedules)\n\tr.Get(\"/schedule/{anchorId}\", h.GetSchedule)\n\tr.Put(\"/schedule/{anchorId}\", h.UpsertSchedule)\n\tr.Delete(\"/schedule/{anchorId}\", h.DeleteSchedule)\n\n\tr.Route(\"/{id}\", func(r chi.Router) {\n\t\tr.Get(\"/\", h.GetModel)\n\t\tr.Put(\"/\", h.UpdateModel)\n\t\tr.Delete(\"/\", h.DeleteModel)\n\t\tr.Post(\"/run\", h.RunSingle)\n\t})\n\n\treturn r\n}\n\n// ListModels returns all models for the current connection.\nfunc (h *ModelsHandler) ListModels(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tmodelList, err := h.DB.GetModelsByConnection(session.ConnectionID)\n\tif err != nil {\n\t\twriteJSON(w, 
http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list models\"})\n\t\treturn\n\t}\n\tif modelList == nil {\n\t\tmodelList = []database.Model{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"models\": modelList})\n}\n\n// CreateModel creates a new model.\nfunc (h *ModelsHandler) CreateModel(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName            string `json:\"name\"`\n\t\tDescription     string `json:\"description\"`\n\t\tTargetDatabase  string `json:\"target_database\"`\n\t\tMaterialization string `json:\"materialization\"`\n\t\tSQLBody         string `json:\"sql_body\"`\n\t\tTableEngine     string `json:\"table_engine\"`\n\t\tOrderBy         string `json:\"order_by\"`\n\t}\n\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tif err := models.ValidateModelName(body.Name); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\tif body.TargetDatabase == \"\" {\n\t\tbody.TargetDatabase = \"default\"\n\t}\n\tif body.Materialization == \"\" {\n\t\tbody.Materialization = \"view\"\n\t}\n\tif body.Materialization != \"view\" && body.Materialization != \"table\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"materialization must be 'view' or 'table'\"})\n\t\treturn\n\t}\n\tif body.Materialization == \"table\" {\n\t\tif body.TableEngine == \"\" {\n\t\t\tbody.TableEngine = \"MergeTree\"\n\t\t}\n\t\tif body.OrderBy == \"\" {\n\t\t\tbody.OrderBy = \"tuple()\"\n\t\t}\n\t}\n\n\tid, err := h.DB.CreateModel(\n\t\tsession.ConnectionID, body.Name, body.Description,\n\t\tbody.TargetDatabase, body.Materialization, 
body.SQLBody,\n\t\tbody.TableEngine, body.OrderBy, session.ClickhouseUser,\n\t)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": fmt.Sprintf(\"Failed to create model: %v\", err)})\n\t\treturn\n\t}\n\n\tmodel, _ := h.DB.GetModelByID(id)\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"model\": model})\n}\n\n// GetModel returns a single model.\nfunc (h *ModelsHandler) GetModel(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tmodel, err := h.DB.GetModelByID(id)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get model\"})\n\t\treturn\n\t}\n\tif model == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Model not found\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"model\": model})\n}\n\n// UpdateModel updates an existing model.\nfunc (h *ModelsHandler) UpdateModel(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\n\texisting, err := h.DB.GetModelByID(id)\n\tif err != nil || existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Model not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName            string `json:\"name\"`\n\t\tDescription     string `json:\"description\"`\n\t\tTargetDatabase  string `json:\"target_database\"`\n\t\tMaterialization string `json:\"materialization\"`\n\t\tSQLBody         string `json:\"sql_body\"`\n\t\tTableEngine     string `json:\"table_engine\"`\n\t\tOrderBy         string `json:\"order_by\"`\n\t}\n\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tif body.Name != \"\" {\n\t\tif err := models.ValidateModelName(body.Name); err != nil {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": 
err.Error()})\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tbody.Name = existing.Name\n\t}\n\n\tif body.TargetDatabase == \"\" {\n\t\tbody.TargetDatabase = existing.TargetDatabase\n\t}\n\tif body.Materialization == \"\" {\n\t\tbody.Materialization = existing.Materialization\n\t}\n\tif body.Materialization != \"view\" && body.Materialization != \"table\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"materialization must be 'view' or 'table'\"})\n\t\treturn\n\t}\n\tif body.TableEngine == \"\" {\n\t\tbody.TableEngine = existing.TableEngine\n\t}\n\tif body.OrderBy == \"\" {\n\t\tbody.OrderBy = existing.OrderBy\n\t}\n\tif body.SQLBody == \"\" {\n\t\tbody.SQLBody = existing.SQLBody\n\t}\n\n\tif err := h.DB.UpdateModel(id, body.Name, body.Description, body.TargetDatabase,\n\t\tbody.Materialization, body.SQLBody, body.TableEngine, body.OrderBy); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": fmt.Sprintf(\"Failed to update model: %v\", err)})\n\t\treturn\n\t}\n\n\tmodel, _ := h.DB.GetModelByID(id)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"model\": model})\n}\n\n// DeleteModel removes a model.\nfunc (h *ModelsHandler) DeleteModel(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif err := h.DB.DeleteModel(id); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete model\"})\n\t\treturn\n\t}\n\twriteJSON(w, http.StatusOK, map[string]string{\"status\": \"deleted\"})\n}\n\n// GetDAG returns the dependency graph for XyFlow visualization.\nfunc (h *ModelsHandler) GetDAG(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tallModels, err := h.DB.GetModelsByConnection(session.ConnectionID)\n\tif err != nil {\n\t\twriteJSON(w, 
http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load models\"})\n\t\treturn\n\t}\n\n\tif len(allModels) == 0 {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"nodes\": []interface{}{},\n\t\t\t\"edges\": []interface{}{},\n\t\t})\n\t\treturn\n\t}\n\n\t// Build DAG for layout computation\n\tnameToID := make(map[string]string)\n\tvar modelIDs []string\n\trefsByID := make(map[string][]string)\n\tidToModel := make(map[string]database.Model)\n\n\tfor _, m := range allModels {\n\t\tnameToID[m.Name] = m.ID\n\t\tidToModel[m.ID] = m\n\t\tmodelIDs = append(modelIDs, m.ID)\n\t\trefsByID[m.ID] = models.ExtractRefs(m.SQLBody)\n\t}\n\n\tdag, dagErr := models.BuildDAG(modelIDs, refsByID, nameToID)\n\n\t// Compute depth for layout\n\tdepth := make(map[string]int)\n\tif dagErr == nil {\n\t\tfor _, id := range dag.Order {\n\t\t\td := 0\n\t\t\tfor _, depID := range dag.Deps[id] {\n\t\t\t\tif depth[depID] >= d {\n\t\t\t\t\td = depth[depID] + 1\n\t\t\t\t}\n\t\t\t}\n\t\t\tdepth[id] = d\n\t\t}\n\t}\n\n\t// Group by depth for y positioning\n\tlayers := make(map[int]int) // depth -> count at that depth\n\n\ttype dagNode struct {\n\t\tID       string      `json:\"id\"`\n\t\tData     interface{} `json:\"data\"`\n\t\tPosition struct {\n\t\t\tX float64 `json:\"x\"`\n\t\t\tY float64 `json:\"y\"`\n\t\t} `json:\"position\"`\n\t}\n\ttype dagEdge struct {\n\t\tID     string `json:\"id\"`\n\t\tSource string `json:\"source\"`\n\t\tTarget string `json:\"target\"`\n\t}\n\n\tvar nodes []dagNode\n\tvar edges []dagEdge\n\n\tfor _, m := range allModels {\n\t\td := depth[m.ID]\n\t\tidx := layers[d]\n\t\tlayers[d]++\n\n\t\tn := dagNode{\n\t\t\tID: m.ID,\n\t\t\tData: map[string]interface{}{\n\t\t\t\t\"name\":            m.Name,\n\t\t\t\t\"materialization\": m.Materialization,\n\t\t\t\t\"status\":          m.Status,\n\t\t\t\t\"target_database\": m.TargetDatabase,\n\t\t\t},\n\t\t}\n\t\tn.Position.X = float64(d) * 300\n\t\tn.Position.Y = float64(idx) * 120\n\n\t\tnodes = 
append(nodes, n)\n\t}\n\n\t// Build edges from refs\n\tfor _, m := range allModels {\n\t\trefs := models.ExtractRefs(m.SQLBody)\n\t\tfor _, ref := range refs {\n\t\t\tif srcID, ok := nameToID[ref]; ok {\n\t\t\t\tedges = append(edges, dagEdge{\n\t\t\t\t\tID:     fmt.Sprintf(\"e-%s-%s\", srcID, m.ID),\n\t\t\t\t\tSource: srcID,\n\t\t\t\t\tTarget: m.ID,\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"nodes\": nodes,\n\t\t\"edges\": edges,\n\t})\n}\n\n// ValidateAll checks all models for reference errors and cycles.\nfunc (h *ModelsHandler) ValidateAll(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\terrors, err := h.Runner.Validate(session.ConnectionID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": fmt.Sprintf(\"Validation failed: %v\", err)})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"valid\":  len(errors) == 0,\n\t\t\"errors\": errors,\n\t})\n}\n\n// RunAll triggers execution of all models.\nfunc (h *ModelsHandler) RunAll(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\trunID, err := h.Runner.RunAll(session.ConnectionID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"run_id\": runID})\n}\n\n// RunSingle triggers execution of a single model and its deps.\nfunc (h *ModelsHandler) RunSingle(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, 
map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\trunID, err := h.Runner.RunSingle(session.ConnectionID, id, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"run_id\": runID})\n}\n\n// ListRuns returns recent model runs.\nfunc (h *ModelsHandler) ListRuns(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tlimit, _ := strconv.Atoi(r.URL.Query().Get(\"limit\"))\n\tif limit <= 0 {\n\t\tlimit = 20\n\t}\n\toffset, _ := strconv.Atoi(r.URL.Query().Get(\"offset\"))\n\n\truns, err := h.DB.GetModelRuns(session.ConnectionID, limit, offset)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list runs\"})\n\t\treturn\n\t}\n\tif runs == nil {\n\t\truns = []database.ModelRun{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"runs\": runs})\n}\n\n// GetRun returns a single run with per-model results.\nfunc (h *ModelsHandler) GetRun(w http.ResponseWriter, r *http.Request) {\n\trunID := chi.URLParam(r, \"runId\")\n\n\trun, err := h.DB.GetModelRunByID(runID)\n\tif err != nil || run == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Run not found\"})\n\t\treturn\n\t}\n\n\tresults, err := h.DB.GetModelRunResults(runID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load run results\"})\n\t\treturn\n\t}\n\tif results == nil {\n\t\tresults = []database.ModelRunResult{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"run\":     run,\n\t\t\"results\": results,\n\t})\n}\n\n// ── Pipeline endpoints 
──────────────────────────────────────────────\n\n// ListPipelines returns connected components with their schedules.\nfunc (h *ModelsHandler) ListPipelines(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tallModels, err := h.DB.GetModelsByConnection(session.ConnectionID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load models\"})\n\t\treturn\n\t}\n\n\tif len(allModels) == 0 {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"pipelines\": []interface{}{}})\n\t\treturn\n\t}\n\n\tnameToID := make(map[string]string)\n\tvar modelIDs []string\n\trefsByID := make(map[string][]string)\n\n\tfor _, m := range allModels {\n\t\tnameToID[m.Name] = m.ID\n\t\tmodelIDs = append(modelIDs, m.ID)\n\t\trefsByID[m.ID] = models.ExtractRefs(m.SQLBody)\n\t}\n\n\tdag, dagErr := models.BuildDAG(modelIDs, refsByID, nameToID)\n\tif dagErr != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": fmt.Sprintf(\"DAG error: %v\", dagErr)})\n\t\treturn\n\t}\n\n\tcomponents := dag.ConnectedComponents()\n\n\t// Load all schedules for this connection\n\tschedules, err := h.DB.GetModelSchedulesByConnection(session.ConnectionID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to load schedules\"})\n\t\treturn\n\t}\n\tschedByAnchor := make(map[string]database.ModelSchedule)\n\tfor _, s := range schedules {\n\t\tif s.AnchorModelID != nil {\n\t\t\tschedByAnchor[*s.AnchorModelID] = s\n\t\t}\n\t}\n\n\ttype pipelineResp struct {\n\t\tAnchorModelID string                  `json:\"anchor_model_id\"`\n\t\tModelIDs      []string                `json:\"model_ids\"`\n\t\tSchedule      *database.ModelSchedule `json:\"schedule\"`\n\t}\n\n\tvar pipelines []pipelineResp\n\tfor _, comp := 
range components {\n\t\tif len(comp) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tanchor := comp[0] // first in topo order\n\t\tp := pipelineResp{\n\t\t\tAnchorModelID: anchor,\n\t\t\tModelIDs:      comp,\n\t\t}\n\t\tif s, ok := schedByAnchor[anchor]; ok {\n\t\t\tp.Schedule = &s\n\t\t} else {\n\t\t\t// Check if any model in this component has a schedule\n\t\t\tfor _, id := range comp {\n\t\t\t\tif s, ok := schedByAnchor[id]; ok {\n\t\t\t\t\tp.Schedule = &s\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tpipelines = append(pipelines, p)\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"pipelines\": pipelines})\n}\n\n// RunPipeline triggers execution of a single pipeline (connected component).\nfunc (h *ModelsHandler) RunPipeline(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tanchorID := chi.URLParam(r, \"anchorId\")\n\trunID, err := h.Runner.RunPipeline(session.ConnectionID, anchorID, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"run_id\": runID})\n}\n\n// ── Schedule endpoints ──────────────────────────────────────────────\n\n// ListSchedules returns all schedules for the current connection.\nfunc (h *ModelsHandler) ListSchedules(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tschedules, err := h.DB.GetModelSchedulesByConnection(session.ConnectionID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list schedules\"})\n\t\treturn\n\t}\n\tif schedules == nil {\n\t\tschedules = 
[]database.ModelSchedule{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"schedules\": schedules})\n}\n\n// GetSchedule returns the schedule for a specific pipeline anchor.\nfunc (h *ModelsHandler) GetSchedule(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tanchorID := chi.URLParam(r, \"anchorId\")\n\tsched, err := h.DB.GetModelScheduleByAnchor(session.ConnectionID, anchorID)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get schedule\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"schedule\": sched})\n}\n\n// UpsertSchedule creates or updates the schedule for a specific pipeline anchor.\nfunc (h *ModelsHandler) UpsertSchedule(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tanchorID := chi.URLParam(r, \"anchorId\")\n\n\tvar body struct {\n\t\tCron    string `json:\"cron\"`\n\t\tEnabled bool   `json:\"enabled\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tif body.Cron == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"cron expression is required\"})\n\t\treturn\n\t}\n\tif !scheduler.ValidateCron(body.Cron) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"invalid cron expression\"})\n\t\treturn\n\t}\n\n\tvar nextRunAt string\n\tif next := scheduler.ComputeNextRun(body.Cron, time.Now().UTC()); next != nil {\n\t\tnextRunAt = next.Format(time.RFC3339)\n\t}\n\n\t_, err := h.DB.UpsertModelSchedule(session.ConnectionID, 
anchorID, body.Cron, nextRunAt, session.ClickhouseUser)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": fmt.Sprintf(\"Failed to save schedule: %v\", err)})\n\t\treturn\n\t}\n\n\tsched, _ := h.DB.GetModelScheduleByAnchor(session.ConnectionID, anchorID)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"schedule\": sched})\n}\n\n// DeleteSchedule removes the schedule for a specific pipeline anchor.\nfunc (h *ModelsHandler) DeleteSchedule(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tanchorID := chi.URLParam(r, \"anchorId\")\n\tif err := h.DB.DeleteModelScheduleByAnchor(session.ConnectionID, anchorID); err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete schedule\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]string{\"status\": \"deleted\"})\n}\n"
  },
  {
    "path": "internal/server/handlers/pipelines.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/pipelines\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// PipelinesHandler handles pipeline CRUD and lifecycle operations.\ntype PipelinesHandler struct {\n\tDB      *database.DB\n\tGateway *tunnel.Gateway\n\tConfig  *config.Config\n\tRunner  *pipelines.Runner\n}\n\n// Routes returns a chi.Router with all pipeline routes mounted.\nfunc (h *PipelinesHandler) Routes() chi.Router {\n\tr := chi.NewRouter()\n\n\tr.Get(\"/\", h.ListPipelines)\n\tr.Post(\"/\", h.CreatePipeline)\n\n\tr.Route(\"/{id}\", func(r chi.Router) {\n\t\tr.Get(\"/\", h.GetPipeline)\n\t\tr.Put(\"/\", h.UpdatePipeline)\n\t\tr.Delete(\"/\", h.DeletePipeline)\n\n\t\t// Graph operations\n\t\tr.Put(\"/graph\", h.SaveGraph)\n\n\t\t// Lifecycle\n\t\tr.Post(\"/start\", h.StartPipeline)\n\t\tr.Post(\"/stop\", h.StopPipeline)\n\n\t\t// Status & monitoring\n\t\tr.Get(\"/status\", h.GetStatus)\n\t\tr.Get(\"/runs\", h.ListRuns)\n\t\tr.Get(\"/runs/{runId}/logs\", h.GetRunLogs)\n\t})\n\n\treturn r\n}\n\n// ListPipelines returns all pipelines.\nfunc (h *PipelinesHandler) ListPipelines(w http.ResponseWriter, r *http.Request) {\n\tpipelines, err := h.DB.GetPipelines()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list pipelines\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list pipelines\"})\n\t\treturn\n\t}\n\n\tif pipelines == nil {\n\t\tpipelines = []database.Pipeline{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"pipelines\": pipelines})\n}\n\n// GetPipeline returns a single pipeline with its graph.\nfunc (h *PipelinesHandler) GetPipeline(w 
http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Pipeline ID is required\"})\n\t\treturn\n\t}\n\n\tpipeline, err := h.DB.GetPipelineByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get pipeline\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get pipeline\"})\n\t\treturn\n\t}\n\tif pipeline == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Pipeline not found\"})\n\t\treturn\n\t}\n\n\tnodes, edges, err := h.DB.GetPipelineGraph(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get pipeline graph\", \"error\", err, \"pipeline\", id)\n\t\tnodes = []database.PipelineNode{}\n\t\tedges = []database.PipelineEdge{}\n\t}\n\tif nodes == nil {\n\t\tnodes = []database.PipelineNode{}\n\t}\n\tif edges == nil {\n\t\tedges = []database.PipelineEdge{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"pipeline\": pipeline,\n\t\t\"graph\": map[string]interface{}{\n\t\t\t\"nodes\": nodes,\n\t\t\t\"edges\": edges,\n\t\t},\n\t})\n}\n\n// CreatePipeline creates a new pipeline.\nfunc (h *PipelinesHandler) CreatePipeline(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         string `json:\"name\"`\n\t\tDescription  string `json:\"description\"`\n\t\tConnectionID string `json:\"connection_id\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is 
required\"})\n\t\treturn\n\t}\n\n\tconnectionID := strings.TrimSpace(body.ConnectionID)\n\tif connectionID == \"\" {\n\t\t// Use the session's connection ID as default\n\t\tconnectionID = session.ConnectionID\n\t}\n\n\tid, err := h.DB.CreatePipeline(name, strings.TrimSpace(body.Description), connectionID, session.ClickhouseUser)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create pipeline\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create pipeline\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"pipeline.created\",\n\t\tUsername: &session.ClickhouseUser,\n\t\tDetails:  &name,\n\t})\n\n\tpipeline, _ := h.DB.GetPipelineByID(id)\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\"pipeline\": pipeline})\n}\n\n// UpdatePipeline updates a pipeline's name and description.\nfunc (h *PipelinesHandler) UpdatePipeline(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\n\tvar body struct {\n\t\tName        string `json:\"name\"`\n\t\tDescription string `json:\"description\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is required\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdatePipeline(id, name, strings.TrimSpace(body.Description)); err != nil {\n\t\tslog.Error(\"Failed to update pipeline\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update pipeline\"})\n\t\treturn\n\t}\n\n\tpipeline, _ := 
h.DB.GetPipelineByID(id)\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"pipeline\": pipeline})\n}\n\n// DeletePipeline deletes a pipeline.\nfunc (h *PipelinesHandler) DeletePipeline(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\n\t// Check if pipeline exists and is not running\n\tpipeline, err := h.DB.GetPipelineByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get pipeline\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get pipeline\"})\n\t\treturn\n\t}\n\tif pipeline == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Pipeline not found\"})\n\t\treturn\n\t}\n\tif pipeline.Status == \"running\" || pipeline.Status == \"starting\" {\n\t\twriteJSON(w, http.StatusConflict, map[string]string{\"error\": \"Cannot delete a running pipeline. 
Stop it first.\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeletePipeline(id); err != nil {\n\t\tslog.Error(\"Failed to delete pipeline\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete pipeline\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"pipeline.deleted\",\n\t\tUsername: &session.ClickhouseUser,\n\t\tDetails:  &pipeline.Name,\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]string{\"success\": \"true\"})\n}\n\n// SaveGraph saves the entire pipeline graph (nodes + edges).\nfunc (h *PipelinesHandler) SaveGraph(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\n\tvar body struct {\n\t\tNodes    []graphNode    `json:\"nodes\"`\n\t\tEdges    []graphEdge    `json:\"edges\"`\n\t\tViewport *graphViewport `json:\"viewport\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid JSON body\"})\n\t\treturn\n\t}\n\n\t// Convert to database types\n\tvar nodes []database.PipelineNode\n\tfor _, n := range body.Nodes {\n\t\tconfigJSON, _ := json.Marshal(n.Config)\n\t\tnodes = append(nodes, database.PipelineNode{\n\t\t\tID:              n.ID,\n\t\t\tPipelineID:      id,\n\t\t\tNodeType:        n.NodeType,\n\t\t\tLabel:           n.Label,\n\t\t\tPositionX:       n.PositionX,\n\t\t\tPositionY:       n.PositionY,\n\t\t\tConfigEncrypted: string(configJSON),\n\t\t})\n\t}\n\n\tvar edges []database.PipelineEdge\n\tfor _, e := range body.Edges {\n\t\tedges = append(edges, database.PipelineEdge{\n\t\t\tID:           e.ID,\n\t\t\tPipelineID:   id,\n\t\t\tSourceNodeID: e.SourceNodeID,\n\t\t\tTargetNodeID: e.TargetNodeID,\n\t\t\tSourceHandle: e.SourceHandle,\n\t\t\tTargetHandle: 
e.TargetHandle,\n\t\t})\n\t}\n\n\tviewportJSON := \"\"\n\tif body.Viewport != nil {\n\t\tvp, _ := json.Marshal(body.Viewport)\n\t\tviewportJSON = string(vp)\n\t}\n\n\tif err := h.DB.SavePipelineGraph(id, nodes, edges, viewportJSON); err != nil {\n\t\tslog.Error(\"Failed to save pipeline graph\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to save pipeline graph\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]string{\"success\": \"true\"})\n}\n\n// StartPipeline starts a pipeline via the runner.\nfunc (h *PipelinesHandler) StartPipeline(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\n\tpipeline, err := h.DB.GetPipelineByID(id)\n\tif err != nil || pipeline == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Pipeline not found\"})\n\t\treturn\n\t}\n\n\tif pipeline.Status == \"running\" || pipeline.Status == \"starting\" {\n\t\twriteJSON(w, http.StatusConflict, map[string]string{\"error\": \"Pipeline is already running\"})\n\t\treturn\n\t}\n\n\tif err := h.Runner.StartPipeline(id); err != nil {\n\t\tslog.Error(\"Failed to start pipeline\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"pipeline.started\",\n\t\tUsername: &session.ClickhouseUser,\n\t\tDetails:  &pipeline.Name,\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]string{\"success\": \"true\"})\n}\n\n// StopPipeline stops a running pipeline via the runner.\nfunc (h *PipelinesHandler) StopPipeline(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, 
http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\n\tpipeline, err := h.DB.GetPipelineByID(id)\n\tif err != nil || pipeline == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Pipeline not found\"})\n\t\treturn\n\t}\n\n\tif pipeline.Status != \"running\" && pipeline.Status != \"starting\" {\n\t\twriteJSON(w, http.StatusConflict, map[string]string{\"error\": \"Pipeline is not running\"})\n\t\treturn\n\t}\n\n\tif err := h.Runner.StopPipeline(id); err != nil {\n\t\tslog.Error(\"Failed to stop pipeline\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"pipeline.stopped\",\n\t\tUsername: &session.ClickhouseUser,\n\t\tDetails:  &pipeline.Name,\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]string{\"success\": \"true\"})\n}\n\n// GetStatus returns the current status of a pipeline.\nfunc (h *PipelinesHandler) GetStatus(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\n\tpipeline, err := h.DB.GetPipelineByID(id)\n\tif err != nil || pipeline == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Pipeline not found\"})\n\t\treturn\n\t}\n\n\tresp := map[string]interface{}{\n\t\t\"pipeline_id\": pipeline.ID,\n\t\t\"status\":      pipeline.Status,\n\t\t\"last_error\":  pipeline.LastError,\n\t}\n\n\t// Add live metrics if pipeline is running\n\tif metrics := h.Runner.GetRunningMetrics(id); metrics != nil {\n\t\tresp[\"rows_ingested\"] = metrics.RowsIngested.Load()\n\t\tresp[\"bytes_ingested\"] = metrics.BytesIngested.Load()\n\t\tresp[\"batches_sent\"] = metrics.BatchesSent.Load()\n\t\tresp[\"errors_count\"] = metrics.ErrorsCount.Load()\n\t}\n\n\twriteJSON(w, http.StatusOK, resp)\n}\n\n// ListRuns returns execution runs for a pipeline.\nfunc (h *PipelinesHandler) 
ListRuns(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\n\tlimit := 20\n\toffset := 0\n\tif v := r.URL.Query().Get(\"limit\"); v != \"\" {\n\t\tif parsed, err := strconv.Atoi(v); err == nil && parsed > 0 {\n\t\t\tlimit = parsed\n\t\t}\n\t}\n\tif v := r.URL.Query().Get(\"offset\"); v != \"\" {\n\t\tif parsed, err := strconv.Atoi(v); err == nil && parsed >= 0 {\n\t\t\toffset = parsed\n\t\t}\n\t}\n\n\truns, err := h.DB.GetPipelineRuns(id, limit, offset)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list pipeline runs\", \"error\", err, \"pipeline\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to list runs\"})\n\t\treturn\n\t}\n\tif runs == nil {\n\t\truns = []database.PipelineRun{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"runs\": runs})\n}\n\n// GetRunLogs returns logs for a specific pipeline run.\nfunc (h *PipelinesHandler) GetRunLogs(w http.ResponseWriter, r *http.Request) {\n\trunID := chi.URLParam(r, \"runId\")\n\n\tlimit := 200\n\tif v := r.URL.Query().Get(\"limit\"); v != \"\" {\n\t\tif parsed, err := strconv.Atoi(v); err == nil && parsed > 0 {\n\t\t\tlimit = parsed\n\t\t}\n\t}\n\n\tlogs, err := h.DB.GetPipelineRunLogs(runID, limit)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get run logs\", \"error\", err, \"run\", runID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to get run logs\"})\n\t\treturn\n\t}\n\tif logs == nil {\n\t\tlogs = []database.PipelineRunLog{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"logs\": logs})\n}\n\n// ── Graph request types ────────────────────────────────────────────\n\ntype graphNode struct {\n\tID        string                 `json:\"id\"`\n\tNodeType  string                 `json:\"node_type\"`\n\tLabel     string                 `json:\"label\"`\n\tPositionX float64                `json:\"position_x\"`\n\tPositionY float64                
`json:\"position_y\"`\n\tConfig    map[string]interface{} `json:\"config\"`\n}\n\ntype graphEdge struct {\n\tID           string  `json:\"id\"`\n\tSourceNodeID string  `json:\"source_node_id\"`\n\tTargetNodeID string  `json:\"target_node_id\"`\n\tSourceHandle *string `json:\"source_handle\"`\n\tTargetHandle *string `json:\"target_handle\"`\n}\n\ntype graphViewport struct {\n\tX    float64 `json:\"x\"`\n\tY    float64 `json:\"y\"`\n\tZoom float64 `json:\"zoom\"`\n}\n"
  },
  {
    "path": "internal/server/handlers/query.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"regexp\"\n\t\"sort\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/governance\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n\t\"github.com/go-chi/chi/v5\"\n)\n\nconst maxQueryTimeout = 5 * time.Minute\n\n// QueryHandler handles SQL query execution and schema exploration endpoints.\ntype QueryHandler struct {\n\tDB         *database.DB\n\tGateway    *tunnel.Gateway\n\tConfig     *config.Config\n\tGuardrails *governance.GuardrailService\n}\n\n// Routes registers all query-related routes on the given chi.Router.\nfunc (h *QueryHandler) Routes(r chi.Router) {\n\tr.Post(\"/\", h.ExecuteQuery)\n\tr.Post(\"/run\", h.ExecuteQuery)\n\tr.Post(\"/stream\", h.StreamQuery)\n\tr.Post(\"/sample\", h.SampleQuery)\n\tr.Post(\"/explorer-data\", h.ExplorerData)\n\tr.Post(\"/format\", h.FormatSQL)\n\tr.Post(\"/explain\", h.ExplainQuery)\n\tr.Post(\"/plan\", h.QueryPlan)\n\tr.Post(\"/profile\", h.QueryProfile)\n\tr.Post(\"/estimate\", h.EstimateQuery)\n\tr.Get(\"/databases\", h.ListDatabases)\n\tr.Get(\"/tables\", h.ListTables)\n\tr.Get(\"/columns\", h.ListColumns)\n\tr.Get(\"/data-types\", h.ListDataTypes)\n\tr.Get(\"/clusters\", h.ListClusters)\n\tr.Post(\"/schema/database\", h.CreateDatabase)\n\tr.Post(\"/schema/database/drop\", h.DropDatabase)\n\tr.Post(\"/schema/table\", h.CreateTable)\n\tr.Post(\"/schema/table/drop\", h.DropTable)\n\tr.Post(\"/upload/discover\", h.DiscoverUploadSchema)\n\tr.Post(\"/upload/ingest\", h.IngestUpload)\n\tr.Get(\"/host-info\", h.GetHostInfo)\n\tr.Get(\"/completions\", h.ListCompletions)\n}\n\n// --- Request / Response types ---\n\ntype executeQueryRequest struct 
{\n\tQuery         string `json:\"query\"`\n\tTimeout       int    `json:\"timeout\"`       // seconds\n\tMaxResultRows int    `json:\"maxResultRows\"` // server-side row cap via ClickHouse max_result_rows\n}\n\ntype executeQueryResponse struct {\n\tSuccess    bool            `json:\"success\"`\n\tData       json.RawMessage `json:\"data,omitempty\"`\n\tMeta       json.RawMessage `json:\"meta,omitempty\"`\n\tStatistics json.RawMessage `json:\"statistics,omitempty\"`\n\tRows       int             `json:\"rows\"`\n\tElapsedMS  int64           `json:\"elapsed_ms\"`\n}\n\ntype formatRequest struct {\n\tQuery string `json:\"query\"`\n}\n\ntype formatResponse struct {\n\tFormatted string `json:\"formatted\"`\n}\n\ntype explainRequest struct {\n\tQuery string `json:\"query\"`\n}\n\ntype sampleRequest struct {\n\tQuery    string `json:\"query\"`\n\tPerShard int    `json:\"per_shard\"`\n\tShardBy  string `json:\"shard_by\"`\n\tTimeout  int    `json:\"timeout\"`\n}\n\ntype planNode struct {\n\tID       string  `json:\"id\"`\n\tParentID *string `json:\"parent_id,omitempty\"`\n\tLevel    int     `json:\"level\"`\n\tLabel    string  `json:\"label\"`\n}\n\ntype createDatabaseRequest struct {\n\tName        string `json:\"name\"`\n\tEngine      string `json:\"engine\"`\n\tOnCluster   string `json:\"on_cluster\"`\n\tIfNotExists *bool  `json:\"if_not_exists\"`\n}\n\ntype dropDatabaseRequest struct {\n\tName      string `json:\"name\"`\n\tOnCluster string `json:\"on_cluster\"`\n\tIfExists  *bool  `json:\"if_exists\"`\n\tSync      bool   `json:\"sync\"`\n}\n\ntype createTableColumn struct {\n\tName              string `json:\"name\"`\n\tType              string `json:\"type\"`\n\tDefaultExpression string `json:\"default_expression\"`\n\tComment           string `json:\"comment\"`\n}\n\ntype createTableRequest struct {\n\tDatabase    string              `json:\"database\"`\n\tName        string              `json:\"name\"`\n\tEngine      string              
`json:\"engine\"`\n\tOnCluster   string              `json:\"on_cluster\"`\n\tIfNotExists *bool               `json:\"if_not_exists\"`\n\tColumns     []createTableColumn `json:\"columns\"`\n\tOrderBy     string              `json:\"order_by\"`\n\tPartitionBy string              `json:\"partition_by\"`\n\tPrimaryKey  string              `json:\"primary_key\"`\n\tSampleBy    string              `json:\"sample_by\"`\n\tTTL         string              `json:\"ttl\"`\n\tSettings    string              `json:\"settings\"`\n\tComment     string              `json:\"comment\"`\n}\n\ntype dropTableRequest struct {\n\tDatabase  string `json:\"database\"`\n\tName      string `json:\"name\"`\n\tOnCluster string `json:\"on_cluster\"`\n\tIfExists  *bool  `json:\"if_exists\"`\n\tSync      bool   `json:\"sync\"`\n}\n\n// --- Handlers ---\n\n// ExecuteQuery handles POST / and POST /run.\nfunc (h *QueryHandler) ExecuteQuery(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req executeQueryRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForQuery(w, r, query, r.URL.Path) {\n\t\treturn\n\t}\n\n\t// Determine timeout\n\ttimeout := 30 * time.Second\n\tif req.Timeout > 0 {\n\t\ttimeout = time.Duration(req.Timeout) * time.Second\n\t}\n\tif timeout > maxQueryTimeout {\n\t\ttimeout = maxQueryTimeout\n\t}\n\n\t// Decrypt password\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt 
credentials\")\n\t\treturn\n\t}\n\n\t// Execute query via tunnel\n\tstart := time.Now()\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tquery,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\ttimeout,\n\t)\n\telapsed := time.Since(start).Milliseconds()\n\n\tif err != nil {\n\t\tslog.Warn(\"Query execution failed\", \"error\", err, \"connection\", session.ConnectionID)\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\t// Count rows from data\n\trows := countRows(result.Data)\n\n\t// Audit log (truncate query preview to 100 chars)\n\tpreview := query\n\tif len(preview) > 100 {\n\t\tpreview = preview[:100] + \"...\"\n\t}\n\tgo func() {\n\t\tip := r.RemoteAddr\n\t\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\t\tAction:       \"query.execute\",\n\t\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\t\tConnectionID: strPtr(session.ConnectionID),\n\t\t\tDetails:      strPtr(preview),\n\t\t\tIPAddress:    strPtr(ip),\n\t\t})\n\t}()\n\n\tresp := executeQueryResponse{\n\t\tSuccess:    true,\n\t\tData:       result.Data,\n\t\tMeta:       result.Meta,\n\t\tStatistics: result.Stats,\n\t\tRows:       rows,\n\t\tElapsedMS:  elapsed,\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\tjson.NewEncoder(w).Encode(resp)\n}\n\n// FormatSQL handles POST /format.\nfunc (h *QueryHandler) FormatSQL(w http.ResponseWriter, r *http.Request) {\n\tvar req formatRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\n\tformatted := formatSQL(query)\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(formatResponse{Formatted: formatted})\n}\n\n// ExplainQuery handles POST /explain.\nfunc (h *QueryHandler) 
ExplainQuery(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req explainRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForQuery(w, r, query, r.URL.Path) {\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\texplainSQL := \"EXPLAIN \" + query\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\texplainSQL,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t30*time.Second,\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"data\":    result.Data,\n\t\t\"meta\":    result.Meta,\n\t})\n}\n\n// QueryPlan handles POST /plan and returns a parsed plan tree for visualization.\nfunc (h *QueryHandler) QueryPlan(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req explainRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is 
required\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForQuery(w, r, query, r.URL.Path) {\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tcandidates := []struct {\n\t\tsource string\n\t\tsql    string\n\t}{\n\t\t{source: \"plan\", sql: \"EXPLAIN PLAN \" + query},\n\t\t{source: \"ast\", sql: \"EXPLAIN AST \" + query},\n\t\t{source: \"generic\", sql: \"EXPLAIN \" + query},\n\t}\n\n\tvar lastErr error\n\tfor _, candidate := range candidates {\n\t\tresult, execErr := h.Gateway.ExecuteQuery(\n\t\t\tsession.ConnectionID,\n\t\t\tcandidate.sql,\n\t\t\tsession.ClickhouseUser,\n\t\t\tpassword,\n\t\t\t45*time.Second,\n\t\t)\n\t\tif execErr != nil {\n\t\t\tlastErr = execErr\n\t\t\tcontinue\n\t\t}\n\n\t\tlines := extractExplainLines(result.Data)\n\t\tif len(lines) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tnodes := buildPlanTree(lines)\n\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\": true,\n\t\t\t\"source\":  candidate.source,\n\t\t\t\"lines\":   lines,\n\t\t\t\"nodes\":   nodes,\n\t\t})\n\t\treturn\n\t}\n\n\tif lastErr != nil {\n\t\twriteError(w, http.StatusBadGateway, lastErr.Error())\n\t\treturn\n\t}\n\twriteError(w, http.StatusBadGateway, \"No plan information returned by ClickHouse\")\n}\n\n// EstimateQuery handles POST /estimate and returns cost estimation via EXPLAIN ESTIMATE.\nfunc (h *QueryHandler) EstimateQuery(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req explainRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := 
strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"EXPLAIN ESTIMATE \"+query,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t15*time.Second,\n\t)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\":      true,\n\t\t\t\"tables\":       []interface{}{},\n\t\t\t\"total_rows\":   0,\n\t\t\t\"total_parts\":  0,\n\t\t\t\"total_marks\":  0,\n\t\t\t\"error\":        err.Error(),\n\t\t})\n\t\treturn\n\t}\n\n\trows := decodeRows(result.Data)\n\n\ttype tableEstimate struct {\n\t\tDatabase string `json:\"database\"`\n\t\tTable    string `json:\"table\"`\n\t\tParts    int64  `json:\"parts\"`\n\t\tRows     int64  `json:\"rows\"`\n\t\tMarks    int64  `json:\"marks\"`\n\t}\n\n\tvar tables []tableEstimate\n\tvar totalRows, totalParts, totalMarks int64\n\n\tfor _, row := range rows {\n\t\tte := tableEstimate{\n\t\t\tDatabase: fmt.Sprint(row[\"database\"]),\n\t\t\tTable:    fmt.Sprint(row[\"table\"]),\n\t\t}\n\t\tif v, ok := row[\"parts\"]; ok {\n\t\t\tte.Parts = toInt64(v)\n\t\t}\n\t\tif v, ok := row[\"rows\"]; ok {\n\t\t\tte.Rows = toInt64(v)\n\t\t}\n\t\tif v, ok := row[\"marks\"]; ok {\n\t\t\tte.Marks = toInt64(v)\n\t\t}\n\t\ttables = append(tables, te)\n\t\ttotalRows += te.Rows\n\t\ttotalParts += te.Parts\n\t\ttotalMarks += te.Marks\n\t}\n\n\tif tables == nil {\n\t\ttables = []tableEstimate{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":     true,\n\t\t\"tables\":      tables,\n\t\t\"total_rows\":  totalRows,\n\t\t\"total_parts\": totalParts,\n\t\t\"total_marks\": 
totalMarks,\n\t})\n}\n\n// toInt64 converts an interface value to int64.\nfunc toInt64(v interface{}) int64 {\n\tswitch val := v.(type) {\n\tcase float64:\n\t\treturn int64(val)\n\tcase int64:\n\t\treturn val\n\tcase int:\n\t\treturn int64(val)\n\tcase string:\n\t\tn, _ := strconv.ParseInt(val, 10, 64)\n\t\treturn n\n\tcase json.Number:\n\t\tn, _ := val.Int64()\n\t\treturn n\n\tdefault:\n\t\ts := fmt.Sprint(v)\n\t\tn, _ := strconv.ParseInt(s, 10, 64)\n\t\treturn n\n\t}\n}\n\n// SampleQuery handles POST /sample and returns first N rows per shard when available.\nfunc (h *QueryHandler) SampleQuery(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req sampleRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\tif !isReadOnlyQuery(query) {\n\t\twriteError(w, http.StatusBadRequest, \"Sampling only supports read-only SELECT/WITH queries\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForQuery(w, r, query, r.URL.Path) {\n\t\treturn\n\t}\n\n\tperShard := req.PerShard\n\tif perShard <= 0 {\n\t\tperShard = 25\n\t}\n\tif perShard > 500 {\n\t\tperShard = 500\n\t}\n\n\tshardBy := strings.TrimSpace(req.ShardBy)\n\tif shardBy == \"\" {\n\t\tshardBy = \"_shard_num\"\n\t}\n\ttimeout := 30 * time.Second\n\tif req.Timeout > 0 {\n\t\ttimeout = time.Duration(req.Timeout) * time.Second\n\t}\n\tif timeout > maxQueryTimeout {\n\t\ttimeout = maxQueryTimeout\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt 
credentials\")\n\t\treturn\n\t}\n\n\tbase := stripFormatClause(stripTrailingSemicolon(query))\n\tperShardSQL := fmt.Sprintf(\n\t\t\"SELECT * FROM (%s) AS __ch_ui_sample LIMIT %d BY %s\",\n\t\tbase,\n\t\tperShard,\n\t\tescapeIdentifier(shardBy),\n\t)\n\n\tstart := time.Now()\n\tresult, runErr := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tperShardSQL,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\ttimeout,\n\t)\n\telapsed := time.Since(start).Milliseconds()\n\n\tsamplingMode := \"per_shard\"\n\twarning := \"\"\n\n\tif runErr != nil && shouldFallbackToGlobalSample(runErr.Error()) {\n\t\t// Fallback for local/non-distributed queries where _shard_num is unavailable.\n\t\tfallbackSQL := fmt.Sprintf(\n\t\t\t\"SELECT * FROM (%s) AS __ch_ui_sample LIMIT %d\",\n\t\t\tbase,\n\t\t\tperShard,\n\t\t)\n\n\t\tstart = time.Now()\n\t\tresult, runErr = h.Gateway.ExecuteQuery(\n\t\t\tsession.ConnectionID,\n\t\t\tfallbackSQL,\n\t\t\tsession.ClickhouseUser,\n\t\t\tpassword,\n\t\t\ttimeout,\n\t\t)\n\t\telapsed = time.Since(start).Milliseconds()\n\t\tsamplingMode = \"global\"\n\t\twarning = \"Shard virtual column not available for this query; returned global sample instead.\"\n\t}\n\n\tif runErr != nil {\n\t\twriteError(w, http.StatusBadGateway, runErr.Error())\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":       true,\n\t\t\"data\":          result.Data,\n\t\t\"meta\":          result.Meta,\n\t\t\"statistics\":    result.Stats,\n\t\t\"rows\":          countRows(result.Data),\n\t\t\"elapsed_ms\":    elapsed,\n\t\t\"sampling_mode\": samplingMode,\n\t\t\"warning\":       warning,\n\t})\n}\n\n// QueryProfile handles POST /profile and returns latest query_log metrics for the exact SQL.\nfunc (h *QueryHandler) QueryProfile(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req 
explainRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForQuery(w, r, query, r.URL.Path) {\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tescapedQuery := escapeLiteral(stripTrailingSemicolon(query))\n\tescapedUser := escapeLiteral(session.ClickhouseUser)\n\n\tprofileSQL := fmt.Sprintf(`SELECT\n  query_duration_ms,\n  read_rows,\n  read_bytes,\n  result_rows,\n  result_bytes,\n  memory_usage,\n  ProfileEvents['SelectedRows'] AS selected_rows,\n  ProfileEvents['SelectedBytes'] AS selected_bytes,\n  ProfileEvents['SelectedMarks'] AS selected_marks\nFROM system.query_log\nWHERE type = 'QueryFinish'\n  AND query = '%s'\n  AND user = '%s'\nORDER BY event_time DESC\nLIMIT 1`, escapedQuery, escapedUser)\n\n\tresult, execErr := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tprofileSQL,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t10*time.Second,\n\t)\n\tif execErr != nil {\n\t\t// query_log may be unavailable depending on ClickHouse config/version.\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\":   true,\n\t\t\t\"available\": false,\n\t\t\t\"reason\":    \"system.query_log is unavailable for this connection\",\n\t\t})\n\t\treturn\n\t}\n\n\trows := decodeRows(result.Data)\n\tif len(rows) == 0 {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\":   true,\n\t\t\t\"available\": false,\n\t\t\t\"reason\":    \"No query profile row found yet (query_log flush can be 
delayed)\",\n\t\t})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":   true,\n\t\t\"available\": true,\n\t\t\"profile\":   rows[0],\n\t})\n}\n\n// StreamQuery handles POST /stream — streaming query execution via NDJSON chunked response.\nfunc (h *QueryHandler) StreamQuery(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req executeQueryRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tquery := strings.TrimSpace(req.Query)\n\tif query == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"Query is required\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForQuery(w, r, query, r.URL.Path) {\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tflusher, ok := w.(http.Flusher)\n\tif !ok {\n\t\twriteError(w, http.StatusInternalServerError, \"Streaming not supported\")\n\t\treturn\n\t}\n\n\tmaxRows := req.MaxResultRows\n\tif maxRows <= 0 {\n\t\tmaxRows = 1000\n\t}\n\tif maxRows > 1_000_000 {\n\t\tmaxRows = 1_000_000\n\t}\n\n\trequestID, stream, err := h.Gateway.ExecuteStreamQuery(\n\t\tsession.ConnectionID,\n\t\tquery,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\tmap[string]string{\n\t\t\t\"max_result_rows\":      strconv.Itoa(maxRows),\n\t\t\t\"result_overflow_mode\": \"break\",\n\t\t},\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\tdefer h.Gateway.CleanupStream(session.ConnectionID, requestID)\n\n\tw.Header().Set(\"Content-Type\", 
\"application/x-ndjson\")\n\tw.Header().Set(\"Cache-Control\", \"no-cache\")\n\tw.Header().Set(\"X-Content-Type-Options\", \"nosniff\")\n\tw.WriteHeader(http.StatusOK)\n\n\tenc := json.NewEncoder(w)\n\tctx := r.Context()\n\n\t// Wait for meta or error\n\tselect {\n\tcase meta := <-stream.MetaCh:\n\t\tenc.Encode(map[string]interface{}{\"type\": \"meta\", \"meta\": meta})\n\t\tflusher.Flush()\n\tcase err := <-stream.ErrorCh:\n\t\tenc.Encode(map[string]interface{}{\"type\": \"error\", \"error\": err.Error()})\n\t\tflusher.Flush()\n\t\treturn\n\tcase <-ctx.Done():\n\t\treturn\n\t}\n\n\t// Read chunks until channel is closed or client disconnects\n\tseq := 0\n\tfor {\n\t\tselect {\n\t\tcase chunk, ok := <-stream.ChunkCh:\n\t\t\tif !ok {\n\t\t\t\tgoto streamDone\n\t\t\t}\n\t\t\tenc.Encode(map[string]interface{}{\"type\": \"chunk\", \"data\": chunk, \"seq\": seq})\n\t\t\tflusher.Flush()\n\t\t\tseq++\n\t\tcase <-ctx.Done():\n\t\t\treturn\n\t\t}\n\t}\n\nstreamDone:\n\t// ChunkCh closed — read final done or error\n\tselect {\n\tcase donePayload := <-stream.DoneCh:\n\t\tvar done tunnel.StreamDone\n\t\tjson.Unmarshal(donePayload, &done)\n\t\tenc.Encode(map[string]interface{}{\n\t\t\t\"type\":       \"done\",\n\t\t\t\"statistics\": done.Statistics,\n\t\t\t\"total_rows\": done.TotalRows,\n\t\t})\n\t\tflusher.Flush()\n\tcase err := <-stream.ErrorCh:\n\t\tenc.Encode(map[string]interface{}{\"type\": \"error\", \"error\": err.Error()})\n\t\tflusher.Flush()\n\tcase <-ctx.Done():\n\t\treturn\n\t}\n\n\t// Audit log\n\tpreview := query\n\tif len(preview) > 100 {\n\t\tpreview = preview[:100] + \"...\"\n\t}\n\tgo func() {\n\t\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\t\tAction:       \"query.stream\",\n\t\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\t\tConnectionID: strPtr(session.ConnectionID),\n\t\t\tDetails:      strPtr(preview),\n\t\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t\t})\n\t}()\n}\n\n// ExplorerData handles POST /explorer-data — server-side paginated data 
browsing.\nfunc (h *QueryHandler) ExplorerData(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tvar req struct {\n\t\tDatabase   string `json:\"database\"`\n\t\tTable      string `json:\"table\"`\n\t\tPage       int    `json:\"page\"`\n\t\tPageSize   int    `json:\"page_size\"`\n\t\tSortColumn string `json:\"sort_column\"`\n\t\tSortDir    string `json:\"sort_dir\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tif req.Database == \"\" || req.Table == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"database and table are required\")\n\t\treturn\n\t}\n\tif !h.enforceGuardrailsForTable(w, r, req.Database, req.Table, r.URL.Path) {\n\t\treturn\n\t}\n\tif req.PageSize <= 0 || req.PageSize > 1000 {\n\t\treq.PageSize = 100\n\t}\n\tif req.Page < 0 {\n\t\treq.Page = 0\n\t}\n\n\tsortDir := \"ASC\"\n\tif strings.EqualFold(req.SortDir, \"desc\") {\n\t\tsortDir = \"DESC\"\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\t// Build data query with LIMIT/OFFSET\n\toffset := req.Page * req.PageSize\n\tdataSQL := fmt.Sprintf(\"SELECT * FROM %s.%s\", escapeIdentifier(req.Database), escapeIdentifier(req.Table))\n\tif req.SortColumn != \"\" {\n\t\tdataSQL += fmt.Sprintf(\" ORDER BY %s %s\", escapeIdentifier(req.SortColumn), sortDir)\n\t}\n\tdataSQL += fmt.Sprintf(\" LIMIT %d OFFSET %d\", req.PageSize, offset)\n\n\t// Build count query\n\tcountSQL := fmt.Sprintf(\"SELECT count() FROM %s.%s\", escapeIdentifier(req.Database), escapeIdentifier(req.Table))\n\n\t// Execute data query (JSONCompact — positional 
arrays, smaller payload)\n\tdataRaw, err := h.Gateway.ExecuteQueryWithFormat(\n\t\tsession.ConnectionID, dataSQL, session.ClickhouseUser, password, \"JSONCompact\", 30*time.Second,\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\t// Execute count query\n\tcountRaw, err := h.Gateway.ExecuteQueryWithFormat(\n\t\tsession.ConnectionID, countSQL, session.ClickhouseUser, password, \"JSONCompact\", 30*time.Second,\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\t// Parse JSONCompact data result\n\tvar dataCompact struct {\n\t\tMeta json.RawMessage `json:\"meta\"`\n\t\tData json.RawMessage `json:\"data\"`\n\t\tRows int             `json:\"rows\"`\n\t}\n\tjson.Unmarshal(dataRaw, &dataCompact)\n\n\t// Parse count result — JSONCompact: {\"data\":[[12345]]}\n\tvar totalRows int64\n\tvar countCompact struct {\n\t\tData [][]json.RawMessage `json:\"data\"`\n\t}\n\tif err := json.Unmarshal(countRaw, &countCompact); err == nil &&\n\t\tlen(countCompact.Data) > 0 && len(countCompact.Data[0]) > 0 {\n\t\tvar v interface{}\n\t\tif json.Unmarshal(countCompact.Data[0][0], &v) == nil {\n\t\t\tswitch n := v.(type) {\n\t\t\tcase float64:\n\t\t\t\ttotalRows = int64(n)\n\t\t\tcase string:\n\t\t\t\tfmt.Sscanf(n, \"%d\", &totalRows)\n\t\t\t}\n\t\t}\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\":    true,\n\t\t\"meta\":       dataCompact.Meta,\n\t\t\"data\":       dataCompact.Data,\n\t\t\"rows\":       dataCompact.Rows,\n\t\t\"total_rows\": totalRows,\n\t\t\"page\":       req.Page,\n\t\t\"page_size\":  req.PageSize,\n\t})\n}\n\n// ListDatabases handles GET /databases.\nfunc (h *QueryHandler) ListDatabases(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tpassword, err 
:= crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"SHOW DATABASES\",\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t30*time.Second,\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\tnames := extractNames(result.Data)\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\":   true,\n\t\t\"databases\": names,\n\t})\n}\n\n// ListTables handles GET /tables?database=X.\nfunc (h *QueryHandler) ListTables(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tdb := r.URL.Query().Get(\"database\")\n\tif db == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"database query parameter is required\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tquery := fmt.Sprintf(\"SELECT name, engine FROM system.tables WHERE database = '%s' ORDER BY name\", escapeLiteral(db))\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tquery,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t30*time.Second,\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\ttype tableInfo struct {\n\t\tName   string `json:\"name\"`\n\t\tEngine string `json:\"engine\"`\n\t}\n\n\tvar rows []map[string]interface{}\n\ttables := []tableInfo{}\n\tif len(result.Data) > 
0 {\n\t\tif err := json.Unmarshal(result.Data, &rows); err == nil {\n\t\t\tfor _, row := range rows {\n\t\t\t\tname, _ := row[\"name\"].(string)\n\t\t\t\tengine, _ := row[\"engine\"].(string)\n\t\t\t\tif name != \"\" {\n\t\t\t\t\ttables = append(tables, tableInfo{Name: name, Engine: engine})\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"tables\":  tables,\n\t})\n}\n\n// ListColumns handles GET /columns?database=X&table=Y.\nfunc (h *QueryHandler) ListColumns(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tdbName := r.URL.Query().Get(\"database\")\n\ttable := r.URL.Query().Get(\"table\")\n\tif dbName == \"\" || table == \"\" {\n\t\twriteError(w, http.StatusBadRequest, \"database and table query parameters are required\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tquery := fmt.Sprintf(\"DESCRIBE TABLE %s.%s\", escapeIdentifier(dbName), escapeIdentifier(table))\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tquery,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t30*time.Second,\n\t)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"columns\": result.Data,\n\t\t\"meta\":    result.Meta,\n\t})\n}\n\n// ListDataTypes handles GET /data-types.\nfunc (h *QueryHandler) ListDataTypes(w http.ResponseWriter, r *http.Request) {\n\tsession := 
middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"SELECT name FROM system.data_type_families WHERE is_parametric = 0 ORDER BY name\",\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t15*time.Second,\n\t)\n\tif err != nil {\n\t\t// Fallback for older ClickHouse versions where is_parametric might not exist.\n\t\tslog.Warn(\"Failed to list non-parametric data types; trying fallback query\", \"error\", err, \"connection\", session.ConnectionID)\n\t\tresult, err = h.Gateway.ExecuteQuery(\n\t\t\tsession.ConnectionID,\n\t\t\t\"SELECT name FROM system.data_type_families ORDER BY name\",\n\t\t\tsession.ClickhouseUser,\n\t\t\tpassword,\n\t\t\t15*time.Second,\n\t\t)\n\t\tif err != nil {\n\t\t\tslog.Warn(\"Failed to list data types; returning empty list\", \"error\", err, \"connection\", session.ConnectionID)\n\t\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\t\"success\":    true,\n\t\t\t\t\"data_types\": []string{},\n\t\t\t})\n\t\t\treturn\n\t\t}\n\t}\n\n\trawTypes := extractNames(result.Data)\n\tuniq := make(map[string]struct{}, len(rawTypes))\n\ttypes := make([]string, 0, len(rawTypes))\n\tfor _, t := range rawTypes {\n\t\tt = strings.TrimSpace(t)\n\t\tif t == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif _, exists := uniq[t]; exists {\n\t\t\tcontinue\n\t\t}\n\t\tuniq[t] = struct{}{}\n\t\ttypes = append(types, t)\n\t}\n\tsort.Strings(types)\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":    true,\n\t\t\"data_types\": types,\n\t})\n}\n\n// ListClusters handles GET /clusters.\nfunc (h *QueryHandler) ListClusters(w 
http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"SELECT DISTINCT cluster FROM system.clusters WHERE cluster != '' ORDER BY cluster\",\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t15*time.Second,\n\t)\n\tif err != nil {\n\t\t// Some deployments/users cannot read system.clusters; return an empty list instead of hard failing UI.\n\t\tslog.Warn(\"Failed to list clusters; returning empty list\", \"error\", err, \"connection\", session.ConnectionID)\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\":  true,\n\t\t\t\"clusters\": []string{},\n\t\t})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"clusters\": extractNames(result.Data),\n\t})\n}\n\n// CreateDatabase handles POST /schema/database.\nfunc (h *QueryHandler) CreateDatabase(w http.ResponseWriter, r *http.Request) {\n\tsession := h.requireSchemaAdmin(w, r)\n\tif session == nil {\n\t\treturn\n\t}\n\n\tvar req createDatabaseRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(req.Name)\n\tif err := validateSimpleObjectName(name, \"database\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif isSystemDatabaseName(name) {\n\t\twriteError(w, http.StatusBadRequest, \"Cannot create reserved system database\")\n\t\treturn\n\t}\n\n\tengine := strings.TrimSpace(req.Engine)\n\tif engine 
== \"\" {\n\t\tengine = \"Atomic\"\n\t}\n\tif isUnsafeSQLFragment(engine) {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid engine expression\")\n\t\treturn\n\t}\n\n\tcluster := strings.TrimSpace(req.OnCluster)\n\tif cluster != \"\" && isUnsafeSQLFragment(cluster) {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid cluster name\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tifNotExists := req.IfNotExists == nil || *req.IfNotExists\n\n\tvar sqlBuilder strings.Builder\n\tsqlBuilder.WriteString(\"CREATE DATABASE \")\n\tif ifNotExists {\n\t\tsqlBuilder.WriteString(\"IF NOT EXISTS \")\n\t}\n\tsqlBuilder.WriteString(escapeIdentifier(name))\n\tif cluster != \"\" {\n\t\tsqlBuilder.WriteString(\" ON CLUSTER \")\n\t\tsqlBuilder.WriteString(escapeIdentifier(cluster))\n\t}\n\tsqlBuilder.WriteString(\" ENGINE = \")\n\tsqlBuilder.WriteString(engine)\n\n\tsql := sqlBuilder.String()\n\tif _, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tsql,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t45*time.Second,\n\t); err != nil {\n\t\twriteError(w, http.StatusBadGateway, fmt.Sprintf(\"%s\\n\\nCommand:\\n%s\", err.Error(), sql))\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"schema.database.create\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"database=%s engine=%s cluster=%s\", name, engine, cluster)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"database\": name,\n\t})\n}\n\n// DropDatabase handles POST /schema/database/drop.\nfunc (h *QueryHandler) DropDatabase(w http.ResponseWriter, r 
*http.Request) {\n\tsession := h.requireSchemaAdmin(w, r)\n\tif session == nil {\n\t\treturn\n\t}\n\n\tvar req dropDatabaseRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(req.Name)\n\tif err := validateSimpleObjectName(name, \"database\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif isSystemDatabaseName(name) {\n\t\twriteError(w, http.StatusBadRequest, \"Cannot drop system database\")\n\t\treturn\n\t}\n\tcluster := strings.TrimSpace(req.OnCluster)\n\tif cluster != \"\" && isUnsafeSQLFragment(cluster) {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid cluster name\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tifExists := req.IfExists == nil || *req.IfExists\n\n\tvar sqlBuilder strings.Builder\n\tsqlBuilder.WriteString(\"DROP DATABASE \")\n\tif ifExists {\n\t\tsqlBuilder.WriteString(\"IF EXISTS \")\n\t}\n\tsqlBuilder.WriteString(escapeIdentifier(name))\n\tif cluster != \"\" {\n\t\tsqlBuilder.WriteString(\" ON CLUSTER \")\n\t\tsqlBuilder.WriteString(escapeIdentifier(cluster))\n\t}\n\tif req.Sync {\n\t\tsqlBuilder.WriteString(\" SYNC\")\n\t}\n\n\tsql := sqlBuilder.String()\n\tif _, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tsql,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t45*time.Second,\n\t); err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"schema.database.drop\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      
strPtr(fmt.Sprintf(\"database=%s cluster=%s sync=%t\", name, cluster, req.Sync)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"database\": name,\n\t})\n}\n\n// CreateTable handles POST /schema/table.\nfunc (h *QueryHandler) CreateTable(w http.ResponseWriter, r *http.Request) {\n\tsession := h.requireSchemaAdmin(w, r)\n\tif session == nil {\n\t\treturn\n\t}\n\n\tvar req createTableRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tdbName := strings.TrimSpace(req.Database)\n\ttableName := strings.TrimSpace(req.Name)\n\tif err := validateSimpleObjectName(dbName, \"database\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif err := validateSimpleObjectName(tableName, \"table\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif isSystemDatabaseName(dbName) {\n\t\twriteError(w, http.StatusBadRequest, \"Cannot create tables in system databases\")\n\t\treturn\n\t}\n\n\tif len(req.Columns) == 0 {\n\t\twriteError(w, http.StatusBadRequest, \"At least one column is required\")\n\t\treturn\n\t}\n\n\tengine := strings.TrimSpace(req.Engine)\n\tif engine == \"\" {\n\t\tengine = \"MergeTree\"\n\t}\n\tif isUnsafeSQLFragment(engine) {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid engine expression\")\n\t\treturn\n\t}\n\n\tcluster := strings.TrimSpace(req.OnCluster)\n\tif cluster != \"\" && isUnsafeSQLFragment(cluster) {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid cluster name\")\n\t\treturn\n\t}\n\n\tcolumnsSQL := make([]string, 0, len(req.Columns))\n\tfor i, col := range req.Columns {\n\t\tcolName := strings.TrimSpace(col.Name)\n\t\tcolType := strings.TrimSpace(col.Type)\n\t\tif err := validateSimpleObjectName(colName, fmt.Sprintf(\"column #%d\", i+1)); err != nil {\n\t\t\twriteError(w, 
http.StatusBadRequest, err.Error())\n\t\t\treturn\n\t\t}\n\t\tif colType == \"\" || isUnsafeSQLFragment(colType) {\n\t\t\twriteError(w, http.StatusBadRequest, fmt.Sprintf(\"Invalid type for column %q\", colName))\n\t\t\treturn\n\t\t}\n\n\t\tpart := escapeIdentifier(colName) + \" \" + colType\n\t\tif def := strings.TrimSpace(col.DefaultExpression); def != \"\" {\n\t\t\tif isUnsafeSQLFragment(def) {\n\t\t\t\twriteError(w, http.StatusBadRequest, fmt.Sprintf(\"Invalid default expression for column %q\", colName))\n\t\t\t\treturn\n\t\t\t}\n\t\t\tpart += \" DEFAULT \" + def\n\t\t}\n\t\tif comment := strings.TrimSpace(col.Comment); comment != \"\" {\n\t\t\tpart += \" COMMENT '\" + escapeLiteral(comment) + \"'\"\n\t\t}\n\t\tcolumnsSQL = append(columnsSQL, part)\n\t}\n\n\torderBy := strings.TrimSpace(req.OrderBy)\n\tpartitionBy := strings.TrimSpace(req.PartitionBy)\n\tprimaryKey := strings.TrimSpace(req.PrimaryKey)\n\tsampleBy := strings.TrimSpace(req.SampleBy)\n\tttl := strings.TrimSpace(req.TTL)\n\tsettings := strings.TrimSpace(req.Settings)\n\tcomment := strings.TrimSpace(req.Comment)\n\n\texpressions := []struct {\n\t\tname  string\n\t\tvalue string\n\t}{\n\t\t{name: \"order_by\", value: orderBy},\n\t\t{name: \"partition_by\", value: partitionBy},\n\t\t{name: \"primary_key\", value: primaryKey},\n\t\t{name: \"sample_by\", value: sampleBy},\n\t\t{name: \"ttl\", value: ttl},\n\t\t{name: \"settings\", value: settings},\n\t}\n\tfor _, expr := range expressions {\n\t\tif expr.value != \"\" && isUnsafeSQLFragment(expr.value) {\n\t\t\twriteError(w, http.StatusBadRequest, fmt.Sprintf(\"Invalid %s expression\", expr.name))\n\t\t\treturn\n\t\t}\n\t}\n\n\tif strings.Contains(strings.ToLower(engine), \"mergetree\") && orderBy == \"\" {\n\t\torderBy = \"tuple()\"\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, 
http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tifNotExists := req.IfNotExists == nil || *req.IfNotExists\n\n\tvar sqlBuilder strings.Builder\n\tsqlBuilder.WriteString(\"CREATE TABLE \")\n\tif ifNotExists {\n\t\tsqlBuilder.WriteString(\"IF NOT EXISTS \")\n\t}\n\tsqlBuilder.WriteString(escapeIdentifier(dbName))\n\tsqlBuilder.WriteString(\".\")\n\tsqlBuilder.WriteString(escapeIdentifier(tableName))\n\tif cluster != \"\" {\n\t\tsqlBuilder.WriteString(\" ON CLUSTER \")\n\t\tsqlBuilder.WriteString(escapeIdentifier(cluster))\n\t}\n\tsqlBuilder.WriteString(\" (\\n  \")\n\tsqlBuilder.WriteString(strings.Join(columnsSQL, \",\\n  \"))\n\tsqlBuilder.WriteString(\"\\n)\")\n\tsqlBuilder.WriteString(\"\\nENGINE = \")\n\tsqlBuilder.WriteString(engine)\n\tif partitionBy != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nPARTITION BY \")\n\t\tsqlBuilder.WriteString(partitionBy)\n\t}\n\tif orderBy != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nORDER BY \")\n\t\tsqlBuilder.WriteString(orderBy)\n\t}\n\tif primaryKey != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nPRIMARY KEY \")\n\t\tsqlBuilder.WriteString(primaryKey)\n\t}\n\tif sampleBy != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nSAMPLE BY \")\n\t\tsqlBuilder.WriteString(sampleBy)\n\t}\n\tif ttl != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nTTL \")\n\t\tsqlBuilder.WriteString(ttl)\n\t}\n\tif settings != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nSETTINGS \")\n\t\tsqlBuilder.WriteString(settings)\n\t}\n\tif comment != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nCOMMENT '\")\n\t\tsqlBuilder.WriteString(escapeLiteral(comment))\n\t\tsqlBuilder.WriteString(\"'\")\n\t}\n\n\tsql := sqlBuilder.String()\n\tif _, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tsql,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t45*time.Second,\n\t); err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       
\"schema.table.create\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"table=%s.%s engine=%s cluster=%s\", dbName, tableName, engine, cluster)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusCreated, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"database\": dbName,\n\t\t\"table\":    tableName,\n\t\t\"command\":  sql,\n\t})\n}\n\n// DropTable handles POST /schema/table/drop.\nfunc (h *QueryHandler) DropTable(w http.ResponseWriter, r *http.Request) {\n\tsession := h.requireSchemaAdmin(w, r)\n\tif session == nil {\n\t\treturn\n\t}\n\n\tvar req dropTableRequest\n\tif err := json.NewDecoder(r.Body).Decode(&req); err != nil {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\tdbName := strings.TrimSpace(req.Database)\n\ttableName := strings.TrimSpace(req.Name)\n\tif err := validateSimpleObjectName(dbName, \"database\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif err := validateSimpleObjectName(tableName, \"table\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif isSystemDatabaseName(dbName) {\n\t\twriteError(w, http.StatusBadRequest, \"Cannot drop tables from system databases\")\n\t\treturn\n\t}\n\n\tcluster := strings.TrimSpace(req.OnCluster)\n\tif cluster != \"\" && isUnsafeSQLFragment(cluster) {\n\t\twriteError(w, http.StatusBadRequest, \"Invalid cluster name\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tifExists := req.IfExists == nil || *req.IfExists\n\n\tvar sqlBuilder strings.Builder\n\tsqlBuilder.WriteString(\"DROP TABLE \")\n\tif ifExists 
{\n\t\tsqlBuilder.WriteString(\"IF EXISTS \")\n\t}\n\tsqlBuilder.WriteString(escapeIdentifier(dbName))\n\tsqlBuilder.WriteString(\".\")\n\tsqlBuilder.WriteString(escapeIdentifier(tableName))\n\tif cluster != \"\" {\n\t\tsqlBuilder.WriteString(\" ON CLUSTER \")\n\t\tsqlBuilder.WriteString(escapeIdentifier(cluster))\n\t}\n\tif req.Sync {\n\t\tsqlBuilder.WriteString(\" SYNC\")\n\t}\n\n\tsql := sqlBuilder.String()\n\tif _, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tsql,\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t45*time.Second,\n\t); err != nil {\n\t\twriteError(w, http.StatusBadGateway, err.Error())\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"schema.table.drop\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"table=%s.%s cluster=%s sync=%t\", dbName, tableName, cluster, req.Sync)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"database\": dbName,\n\t\t\"table\":    tableName,\n\t})\n}\n\n// GetHostInfo handles GET /host-info.\nfunc (h *QueryHandler) GetHostInfo(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tinfo, err := h.DB.GetConnectionHostInfo(session.ConnectionID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get host info\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to retrieve host info\")\n\t\treturn\n\t}\n\n\tif info == nil {\n\t\twriteError(w, http.StatusNotFound, \"Host info not available\")\n\t\treturn\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\":   true,\n\t\t\"host_info\": info,\n\t})\n}\n\n// ListCompletions handles GET /completions — 
returns ClickHouse functions and keywords for autocomplete.\nfunc (h *QueryHandler) ListCompletions(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\t// Fetch functions\n\tfnResult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"SELECT name FROM system.functions\",\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t15*time.Second,\n\t)\n\n\tfunctions := []string{}\n\tif err == nil {\n\t\tfunctions = extractNames(fnResult.Data)\n\t}\n\n\t// Fetch keywords\n\tkwResult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\t\"SELECT keyword FROM system.keywords\",\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t15*time.Second,\n\t)\n\n\tkeywords := []string{}\n\tif err == nil {\n\t\tkeywords = extractNames(kwResult.Data)\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\":   true,\n\t\t\"functions\": functions,\n\t\t\"keywords\":  keywords,\n\t})\n}\n\n// --- Helpers ---\n\n// writeJSON writes a JSON response with the given status code.\nfunc writeJSON(w http.ResponseWriter, status int, v interface{}) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(status)\n\tjson.NewEncoder(w).Encode(v)\n}\n\n// writeError writes a JSON error response.\nfunc writeError(w http.ResponseWriter, status int, message string) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(status)\n\tjson.NewEncoder(w).Encode(map[string]interface{}{\n\t\t\"success\": false,\n\t\t\"error\":   message,\n\t})\n}\n\nfunc (h *QueryHandler) 
guardrailsEnabled() bool {\n\tif h.Guardrails == nil {\n\t\treturn false\n\t}\n\tif h.Config == nil {\n\t\treturn true\n\t}\n\treturn h.Config.IsPro()\n}\n\nfunc (h *QueryHandler) enforceGuardrailsForQuery(w http.ResponseWriter, r *http.Request, queryText, requestEndpoint string) bool {\n\tif !h.guardrailsEnabled() {\n\t\treturn true\n\t}\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn false\n\t}\n\n\tdecision, err := h.Guardrails.EvaluateQuery(session.ConnectionID, session.ClickhouseUser, queryText, requestEndpoint)\n\tif err != nil {\n\t\tslog.Error(\"Guardrail pre-exec evaluation failed\", \"connection\", session.ConnectionID, \"endpoint\", requestEndpoint, \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to evaluate governance guardrails\")\n\t\treturn false\n\t}\n\tif decision.Allowed {\n\t\treturn true\n\t}\n\th.writePolicyBlocked(w, decision.Block)\n\treturn false\n}\n\nfunc (h *QueryHandler) enforceGuardrailsForTable(w http.ResponseWriter, r *http.Request, databaseName, tableName, requestEndpoint string) bool {\n\tif !h.guardrailsEnabled() {\n\t\treturn true\n\t}\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn false\n\t}\n\n\tdecision, err := h.Guardrails.EvaluateTable(session.ConnectionID, session.ClickhouseUser, databaseName, tableName, requestEndpoint)\n\tif err != nil {\n\t\tslog.Error(\"Guardrail table pre-exec evaluation failed\", \"connection\", session.ConnectionID, \"database\", databaseName, \"table\", tableName, \"endpoint\", requestEndpoint, \"error\", err)\n\t\twriteError(w, http.StatusInternalServerError, \"Failed to evaluate governance guardrails\")\n\t\treturn false\n\t}\n\tif decision.Allowed {\n\t\treturn true\n\t}\n\th.writePolicyBlocked(w, decision.Block)\n\treturn false\n}\n\nfunc (h *QueryHandler) writePolicyBlocked(w 
http.ResponseWriter, block *governance.GuardrailBlock) {\n\tif block == nil {\n\t\twriteJSON(w, http.StatusForbidden, map[string]interface{}{\n\t\t\t\"success\": false,\n\t\t\t\"error\":   \"Query blocked by governance policy\",\n\t\t\t\"code\":    \"policy_blocked\",\n\t\t})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusForbidden, map[string]interface{}{\n\t\t\"success\":          false,\n\t\t\"error\":            block.Detail,\n\t\t\"code\":             \"policy_blocked\",\n\t\t\"policy_id\":        block.PolicyID,\n\t\t\"policy_name\":      block.PolicyName,\n\t\t\"severity\":         block.Severity,\n\t\t\"enforcement_mode\": block.EnforcementMode,\n\t\t\"violation_id\":     block.ViolationID,\n\t})\n}\n\n// escapeIdentifier wraps a SQL identifier in backticks and escapes any inner backticks.\nfunc escapeIdentifier(name string) string {\n\tescaped := strings.ReplaceAll(name, \"`\", \"``\")\n\treturn \"`\" + escaped + \"`\"\n}\n\n// escapeLiteral escapes single quotes and backslashes for ClickHouse SQL string literals.\n// ClickHouse uses ” (doubled single-quote) to escape single quotes in string literals.\nfunc escapeLiteral(value string) string {\n\treturn strings.ReplaceAll(strings.ReplaceAll(value, \"\\\\\", \"\\\\\\\\\"), \"'\", \"''\")\n}\n\nfunc stripTrailingSemicolon(query string) string {\n\treturn strings.TrimRight(query, \" \\n\\t;\")\n}\n\nfunc stripFormatClause(query string) string {\n\tre := regexp.MustCompile(`(?is)\\s+FORMAT\\s+\\w+\\s*$`)\n\treturn strings.TrimSpace(re.ReplaceAllString(query, \"\"))\n}\n\nfunc isReadOnlyQuery(query string) bool {\n\tre := regexp.MustCompile(`(?is)^\\s*(SELECT|WITH|SHOW|DESC|DESCRIBE|EXPLAIN)\\b`)\n\treturn re.MatchString(query)\n}\n\nfunc (h *QueryHandler) requireSchemaAdmin(w http.ResponseWriter, r *http.Request) *middleware.SessionInfo {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteError(w, http.StatusUnauthorized, \"Not authenticated\")\n\t\treturn nil\n\t}\n\n\tisAdmin, err := 
h.DB.IsUserRole(session.ClickhouseUser, \"admin\")\n\tif err != nil {\n\t\twriteError(w, http.StatusInternalServerError, \"Role check failed\")\n\t\treturn nil\n\t}\n\tif !isAdmin {\n\t\twriteError(w, http.StatusForbidden, \"Admin role required for schema changes\")\n\t\treturn nil\n\t}\n\treturn session\n}\n\nfunc validateSimpleObjectName(name string, label string) error {\n\tif strings.TrimSpace(name) == \"\" {\n\t\treturn fmt.Errorf(\"%s name is required\", label)\n\t}\n\tif strings.Contains(name, \".\") {\n\t\treturn fmt.Errorf(\"%s name cannot contain '.'\", label)\n\t}\n\tif strings.ContainsAny(name, \"\\x00\\r\\n\\t\") {\n\t\treturn fmt.Errorf(\"%s name contains invalid control characters\", label)\n\t}\n\treturn nil\n}\n\nfunc isUnsafeSQLFragment(value string) bool {\n\tv := strings.TrimSpace(strings.ToLower(value))\n\tif v == \"\" {\n\t\treturn false\n\t}\n\t// Check for SQL injection patterns: statement terminators, comments, and null bytes\n\treturn strings.Contains(v, \";\") ||\n\t\tstrings.Contains(v, \"--\") ||\n\t\tstrings.Contains(v, \"/*\") ||\n\t\tstrings.Contains(v, \"*/\") ||\n\t\tstrings.ContainsAny(v, \"\\x00\\r\\n\")\n}\n\nfunc isSystemDatabaseName(name string) bool {\n\tswitch strings.ToLower(strings.TrimSpace(name)) {\n\tcase \"system\", \"information_schema\":\n\t\treturn true\n\tdefault:\n\t\treturn false\n\t}\n}\n\nfunc shouldFallbackToGlobalSample(message string) bool {\n\tmsg := strings.ToLower(message)\n\treturn strings.Contains(msg, \"_shard_num\") ||\n\t\tstrings.Contains(msg, \"unknown identifier\") ||\n\t\tstrings.Contains(msg, \"unknown column\")\n}\n\n// strPtr returns a pointer to the given string.\nfunc strPtr(s string) *string {\n\treturn &s\n}\n\nfunc decodeRows(data json.RawMessage) []map[string]interface{} {\n\tif len(data) == 0 {\n\t\treturn nil\n\t}\n\tvar rows []map[string]interface{}\n\tif err := json.Unmarshal(data, &rows); err == nil {\n\t\treturn rows\n\t}\n\n\t// Fallback for positional arrays (rare on this 
path)\n\tvar arrRows [][]interface{}\n\tif err := json.Unmarshal(data, &arrRows); err == nil {\n\t\tnormalized := make([]map[string]interface{}, 0, len(arrRows))\n\t\tfor _, row := range arrRows {\n\t\t\tobj := map[string]interface{}{\"value\": row}\n\t\t\tnormalized = append(normalized, obj)\n\t\t}\n\t\treturn normalized\n\t}\n\treturn nil\n}\n\nfunc extractExplainLines(data json.RawMessage) []string {\n\trows := decodeRows(data)\n\tif len(rows) == 0 {\n\t\treturn nil\n\t}\n\tlines := make([]string, 0, len(rows))\n\tfor _, row := range rows {\n\t\tif len(row) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\t// Common column names in EXPLAIN output first.\n\t\tpriority := []string{\"explain\", \"plan\", \"explain_plan\", \"explain_ast\", \"value\"}\n\t\tpicked := \"\"\n\t\tfor _, key := range priority {\n\t\t\tif v, ok := row[key]; ok {\n\t\t\t\tpicked = fmt.Sprint(v)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif picked == \"\" {\n\t\t\tkeys := make([]string, 0, len(row))\n\t\t\tfor k := range row {\n\t\t\t\tkeys = append(keys, k)\n\t\t\t}\n\t\t\tsort.Strings(keys)\n\t\t\tpicked = fmt.Sprint(row[keys[0]])\n\t\t}\n\t\tpicked = strings.TrimSpace(picked)\n\t\tif picked != \"\" {\n\t\t\tlines = append(lines, picked)\n\t\t}\n\t}\n\treturn lines\n}\n\nfunc buildPlanTree(lines []string) []planNode {\n\tnodes := make([]planNode, 0, len(lines))\n\tstack := make([]string, 0, 16)\n\n\tfor i, line := range lines {\n\t\tlevel := planLineLevel(line)\n\t\tlabel := cleanPlanLabel(line)\n\t\tif label == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif level < 0 {\n\t\t\tlevel = 0\n\t\t}\n\t\tif level > len(stack) {\n\t\t\tlevel = len(stack)\n\t\t}\n\n\t\tid := fmt.Sprintf(\"n%d\", i+1)\n\t\tvar parentID *string\n\t\tif level > 0 && level-1 < len(stack) {\n\t\t\tparent := stack[level-1]\n\t\t\tparentID = &parent\n\t\t}\n\n\t\tif level == len(stack) {\n\t\t\tstack = append(stack, id)\n\t\t} else {\n\t\t\tstack[level] = id\n\t\t\tstack = stack[:level+1]\n\t\t}\n\n\t\tnodes = append(nodes, planNode{\n\t\t\tID:       
id,\n\t\t\tParentID: parentID,\n\t\t\tLevel:    level,\n\t\t\tLabel:    label,\n\t\t})\n\t}\n\treturn nodes\n}\n\nfunc planLineLevel(line string) int {\n\tlevel := 0\n\trunes := []rune(line)\n\tfor i := 0; i < len(runes); {\n\t\tif i+1 < len(runes) && runes[i] == ' ' && runes[i+1] == ' ' {\n\t\t\tlevel++\n\t\t\ti += 2\n\t\t\tcontinue\n\t\t}\n\t\tif i+1 < len(runes) && runes[i] == '│' && runes[i+1] == ' ' {\n\t\t\tlevel++\n\t\t\ti += 2\n\t\t\tcontinue\n\t\t}\n\t\tif runes[i] == ' ' || runes[i] == '│' {\n\t\t\ti++\n\t\t\tcontinue\n\t\t}\n\t\tbreak\n\t}\n\treturn level\n}\n\nfunc cleanPlanLabel(line string) string {\n\tlabel := strings.TrimSpace(line)\n\tlabel = strings.TrimLeft(label, \"│ \")\n\tlabel = strings.TrimPrefix(label, \"└─\")\n\tlabel = strings.TrimPrefix(label, \"├─\")\n\tlabel = strings.TrimPrefix(label, \"─\")\n\treturn strings.TrimSpace(label)\n}\n\n// countRows attempts to determine the number of rows in a JSON data payload.\n// The data is expected to be a JSON array.\nfunc countRows(data json.RawMessage) int {\n\tif len(data) == 0 {\n\t\treturn 0\n\t}\n\tvar arr []json.RawMessage\n\tif err := json.Unmarshal(data, &arr); err != nil {\n\t\treturn 0\n\t}\n\treturn len(arr)\n}\n\n// extractNames extracts the first string value from each object in a JSON array.\n// Used for SHOW DATABASES and SHOW TABLES results where each row has a single column.\nfunc extractNames(data json.RawMessage) []string {\n\tif len(data) == 0 {\n\t\treturn []string{}\n\t}\n\n\tvar rows []map[string]interface{}\n\tif err := json.Unmarshal(data, &rows); err != nil {\n\t\treturn []string{}\n\t}\n\n\tnames := make([]string, 0, len(rows))\n\tfor _, row := range rows {\n\t\t// Take the first (and typically only) value from the row\n\t\tfor _, v := range row {\n\t\t\tif s, ok := v.(string); ok {\n\t\t\t\tnames = append(names, s)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\treturn names\n}\n\n// formatSQL performs basic SQL formatting: uppercases keywords and adds newlines\n// before major 
clauses.\nfunc formatSQL(sql string) string {\n\t// Uppercase SQL keywords\n\tkeywords := []string{\n\t\t\"SELECT\", \"FROM\", \"WHERE\", \"AND\", \"OR\", \"ORDER BY\", \"GROUP BY\",\n\t\t\"HAVING\", \"LIMIT\", \"OFFSET\", \"JOIN\", \"LEFT JOIN\", \"RIGHT JOIN\",\n\t\t\"INNER JOIN\", \"OUTER JOIN\", \"FULL JOIN\", \"CROSS JOIN\",\n\t\t\"ON\", \"AS\", \"IN\", \"NOT\", \"NULL\", \"IS\", \"BETWEEN\", \"LIKE\",\n\t\t\"INSERT\", \"INTO\", \"VALUES\", \"UPDATE\", \"SET\", \"DELETE\",\n\t\t\"CREATE\", \"TABLE\", \"ALTER\", \"DROP\", \"INDEX\",\n\t\t\"DISTINCT\", \"UNION\", \"ALL\", \"EXISTS\", \"CASE\", \"WHEN\", \"THEN\",\n\t\t\"ELSE\", \"END\", \"ASC\", \"DESC\", \"WITH\", \"FORMAT\",\n\t}\n\n\tresult := sql\n\n\t// Replace keywords with uppercase versions (word-boundary aware)\n\tfor _, kw := range keywords {\n\t\tpattern := `(?i)\\b` + regexp.QuoteMeta(kw) + `\\b`\n\t\tre := regexp.MustCompile(pattern)\n\t\tresult = re.ReplaceAllString(result, kw)\n\t}\n\n\t// Add newlines before major clauses\n\tclauses := []string{\n\t\t\"SELECT\", \"FROM\", \"WHERE\", \"ORDER BY\", \"GROUP BY\", \"HAVING\",\n\t\t\"LIMIT\", \"LEFT JOIN\", \"RIGHT JOIN\", \"INNER JOIN\", \"OUTER JOIN\",\n\t\t\"FULL JOIN\", \"CROSS JOIN\", \"JOIN\", \"UNION\",\n\t}\n\tfor _, clause := range clauses {\n\t\tpattern := `(?m)\\s+` + regexp.QuoteMeta(clause) + `\\b`\n\t\tre := regexp.MustCompile(pattern)\n\t\tresult = re.ReplaceAllString(result, \"\\n\"+clause)\n\t}\n\n\treturn strings.TrimSpace(result)\n}\n"
  },
  {
    "path": "internal/server/handlers/query_guardrails_test.go",
    "content": "package handlers\n\nimport (\n\t\"bytes\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"path/filepath\"\n\t\"testing\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/governance\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n)\n\nfunc TestQueryEndpointsBlockedByGuardrailPolicy(t *testing.T) {\n\th, cleanup := newBlockedQueryHandler(t)\n\tdefer cleanup()\n\n\ttests := []struct {\n\t\tname       string\n\t\tpath       string\n\t\tbody       string\n\t\tinvoke     func(http.ResponseWriter, *http.Request)\n\t\twantCTJSON bool\n\t}{\n\t\t{name: \"run\", path: \"/api/query/run\", body: `{\"query\":\"SELECT * FROM db.tbl\"}`, invoke: h.ExecuteQuery, wantCTJSON: true},\n\t\t{name: \"stream\", path: \"/api/query/stream\", body: `{\"query\":\"SELECT * FROM db.tbl\"}`, invoke: h.StreamQuery, wantCTJSON: true},\n\t\t{name: \"sample\", path: \"/api/query/sample\", body: `{\"query\":\"SELECT * FROM db.tbl\"}`, invoke: h.SampleQuery, wantCTJSON: true},\n\t\t{name: \"explain\", path: \"/api/query/explain\", body: `{\"query\":\"SELECT * FROM db.tbl\"}`, invoke: h.ExplainQuery, wantCTJSON: true},\n\t\t{name: \"plan\", path: \"/api/query/plan\", body: `{\"query\":\"SELECT * FROM db.tbl\"}`, invoke: h.QueryPlan, wantCTJSON: true},\n\t\t{name: \"profile\", path: \"/api/query/profile\", body: `{\"query\":\"SELECT * FROM db.tbl\"}`, invoke: h.QueryProfile, wantCTJSON: true},\n\t\t{name: \"explorer\", path: \"/api/query/explorer-data\", body: `{\"database\":\"db\",\"table\":\"tbl\"}`, invoke: h.ExplorerData, wantCTJSON: true},\n\t}\n\n\tfor _, tc := range tests {\n\t\tt.Run(tc.name, func(t *testing.T) {\n\t\t\treq := httptest.NewRequest(http.MethodPost, tc.path, bytes.NewBufferString(tc.body))\n\t\t\treq.Header.Set(\"Content-Type\", \"application/json\")\n\t\t\treq = req.WithContext(middleware.SetSession(req.Context(), &middleware.SessionInfo{\n\t\t\t\tConnectionID:      
\"conn-1\",\n\t\t\t\tClickhouseUser:    \"alice\",\n\t\t\t\tEncryptedPassword: \"unused\",\n\t\t\t}))\n\n\t\t\trr := httptest.NewRecorder()\n\t\t\ttc.invoke(rr, req)\n\n\t\t\tif rr.Code != http.StatusForbidden {\n\t\t\t\tt.Fatalf(\"expected status 403, got %d body=%s\", rr.Code, rr.Body.String())\n\t\t\t}\n\t\t\tif tc.wantCTJSON && rr.Header().Get(\"Content-Type\") != \"application/json\" {\n\t\t\t\tt.Fatalf(\"expected application/json content type, got %q\", rr.Header().Get(\"Content-Type\"))\n\t\t\t}\n\t\t\tif !bytes.Contains(rr.Body.Bytes(), []byte(`\"code\":\"policy_blocked\"`)) {\n\t\t\t\tt.Fatalf(\"expected policy_blocked code in response, got %s\", rr.Body.String())\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc newBlockedQueryHandler(t *testing.T) (*QueryHandler, func()) {\n\tt.Helper()\n\n\tdbPath := filepath.Join(t.TempDir(), \"query_guardrails.db\")\n\tdb, err := database.Open(dbPath)\n\tif err != nil {\n\t\tt.Fatalf(\"open db: %v\", err)\n\t}\n\n\tstore := governance.NewStore(db)\n\tservice := governance.NewGuardrailService(store, db)\n\n\tif _, err := db.Conn().Exec(\n\t\t`INSERT INTO connections (id, name, tunnel_token, status) VALUES (?, ?, ?, ?)`,\n\t\t\"conn-1\", \"Local\", \"token-1\", \"connected\",\n\t); err != nil {\n\t\tt.Fatalf(\"insert connection: %v\", err)\n\t}\n\n\tif err := store.UpsertSyncState(\"conn-1\", string(governance.SyncAccess), \"idle\", nil, nil, 0); err != nil {\n\t\tt.Fatalf(\"upsert access sync state: %v\", err)\n\t}\n\n\tif _, err := store.CreatePolicy(\n\t\t\"conn-1\",\n\t\t\"Block table\",\n\t\t\"\",\n\t\t\"table\",\n\t\t\"db\",\n\t\t\"tbl\",\n\t\t\"\",\n\t\t\"analyst\",\n\t\t\"warn\",\n\t\t\"block\",\n\t\t\"admin\",\n\t); err != nil {\n\t\tt.Fatalf(\"create policy: %v\", err)\n\t}\n\n\th := &QueryHandler{\n\t\tDB:         db,\n\t\tGuardrails: service,\n\t\tConfig:     nil,\n\t}\n\n\tcleanup := func() {\n\t\t_ = db.Close()\n\t}\n\treturn h, cleanup\n}\n"
  },
  {
    "path": "internal/server/handlers/query_upload.go",
    "content": "package handlers\n\nimport (\n\t\"bufio\"\n\t\"bytes\"\n\t\"encoding/csv\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"log/slog\"\n\t\"math\"\n\t\"net/http\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"sort\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/xitongsys/parquet-go-source/local\"\n\t\"github.com/xitongsys/parquet-go/reader\"\n)\n\nconst (\n\tmaxUploadBytes       = 25 * 1024 * 1024\n\tmaxUploadPreviewRows = 20\n)\n\ntype uploadDiscoveredColumn struct {\n\tName     string `json:\"name\"`\n\tType     string `json:\"type\"`\n\tNullable bool   `json:\"nullable\"`\n\tSample   string `json:\"sample,omitempty\"`\n}\n\ntype parsedUploadDataset struct {\n\tRows        []map[string]interface{}\n\tColumnOrder []string\n}\n\ntype uploadInsertColumn struct {\n\tName string\n\tType string\n}\n\n// DiscoverUploadSchema handles POST /upload/discover.\nfunc (h *QueryHandler) DiscoverUploadSchema(w http.ResponseWriter, r *http.Request) {\n\tsession := h.requireSchemaAdmin(w, r)\n\tif session == nil {\n\t\treturn\n\t}\n\n\tfilename, format, payload, err := readUploadFile(w, r)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tdataset, err := parseUploadDataset(format, payload)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif len(dataset.Rows) == 0 {\n\t\twriteError(w, http.StatusBadRequest, \"Uploaded file has no rows\")\n\t\treturn\n\t}\n\n\tcolumns := inferUploadColumns(dataset)\n\tpreview := dataset.Rows\n\tif len(preview) > maxUploadPreviewRows {\n\t\tpreview = preview[:maxUploadPreviewRows]\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":  true,\n\t\t\"filename\": filename,\n\t\t\"format\":   format,\n\t\t\"rows\":     
len(dataset.Rows),\n\t\t\"columns\":  columns,\n\t\t\"preview\":  preview,\n\t})\n}\n\n// IngestUpload handles POST /upload/ingest.\nfunc (h *QueryHandler) IngestUpload(w http.ResponseWriter, r *http.Request) {\n\tsession := h.requireSchemaAdmin(w, r)\n\tif session == nil {\n\t\treturn\n\t}\n\n\tfilename, format, payload, err := readUploadFile(w, r)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n\tdataset, err := parseUploadDataset(format, payload)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif len(dataset.Rows) == 0 {\n\t\twriteError(w, http.StatusBadRequest, \"Uploaded file has no rows\")\n\t\treturn\n\t}\n\n\tdbName := strings.TrimSpace(r.FormValue(\"database\"))\n\ttableName := strings.TrimSpace(r.FormValue(\"table\"))\n\tif err := validateSimpleObjectName(dbName, \"database\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif err := validateSimpleObjectName(tableName, \"table\"); err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif isSystemDatabaseName(dbName) {\n\t\twriteError(w, http.StatusBadRequest, \"Cannot upload into system databases\")\n\t\treturn\n\t}\n\n\tcreateTable := parseMultipartBool(r.FormValue(\"create_table\"), false)\n\tcolumns, err := parseUploadColumnsForm(r.FormValue(\"columns\"))\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif len(columns) == 0 {\n\t\tinferred := inferUploadColumns(dataset)\n\t\tcolumns = make([]uploadDiscoveredColumn, 0, len(inferred))\n\t\tfor _, col := range inferred {\n\t\t\tcolumns = append(columns, uploadDiscoveredColumn{\n\t\t\t\tName: col.Name,\n\t\t\t\tType: col.Type,\n\t\t\t})\n\t\t}\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", \"error\", err)\n\t\twriteError(w, 
http.StatusInternalServerError, \"Failed to decrypt credentials\")\n\t\treturn\n\t}\n\n\tcreatedTable := false\n\tcreateTableSQL := \"\"\n\tif createTable {\n\t\tcreateReq := createTableRequest{\n\t\t\tDatabase:    dbName,\n\t\t\tName:        tableName,\n\t\t\tEngine:      strings.TrimSpace(r.FormValue(\"engine\")),\n\t\t\tOnCluster:   strings.TrimSpace(r.FormValue(\"on_cluster\")),\n\t\t\tIfNotExists: boolPtr(parseMultipartBool(r.FormValue(\"if_not_exists\"), true)),\n\t\t\tOrderBy:     strings.TrimSpace(r.FormValue(\"order_by\")),\n\t\t\tPartitionBy: strings.TrimSpace(r.FormValue(\"partition_by\")),\n\t\t\tPrimaryKey:  strings.TrimSpace(r.FormValue(\"primary_key\")),\n\t\t\tSampleBy:    strings.TrimSpace(r.FormValue(\"sample_by\")),\n\t\t\tTTL:         strings.TrimSpace(r.FormValue(\"ttl\")),\n\t\t\tSettings:    strings.TrimSpace(r.FormValue(\"settings\")),\n\t\t\tComment:     strings.TrimSpace(r.FormValue(\"comment\")),\n\t\t\tColumns:     make([]createTableColumn, 0, len(columns)),\n\t\t}\n\t\tfor _, col := range columns {\n\t\t\tcreateReq.Columns = append(createReq.Columns, createTableColumn{\n\t\t\t\tName: col.Name,\n\t\t\t\tType: col.Type,\n\t\t\t})\n\t\t}\n\n\t\tsql, err := buildCreateTableSQL(createReq)\n\t\tif err != nil {\n\t\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\t\treturn\n\t\t}\n\t\tcreateTableSQL = sql\n\n\t\tif _, err := h.Gateway.ExecuteQuery(\n\t\t\tsession.ConnectionID,\n\t\t\tsql,\n\t\t\tsession.ClickhouseUser,\n\t\t\tpassword,\n\t\t\t45*time.Second,\n\t\t); err != nil {\n\t\t\twriteError(w, http.StatusBadGateway, fmt.Sprintf(\"%s\\n\\nCreate table command:\\n%s\", err.Error(), truncateUploadCommand(sql, 3000)))\n\t\t\treturn\n\t\t}\n\t\tcreatedTable = true\n\t}\n\n\tinsertColumns, err := h.resolveInsertColumns(session, password, dbName, tableName, columns, createTable)\n\tif err != nil {\n\t\twriteError(w, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\tif len(insertColumns) == 0 {\n\t\twriteError(w, 
http.StatusBadRequest, \"No columns available for insert\")\n\t\treturn\n\t}\n\n\trowsInserted, insertPreviewSQL, err := h.insertJSONEachRowBatches(session, password, dbName, tableName, insertColumns, dataset.Rows)\n\tif err != nil {\n\t\tmessage := err.Error()\n\t\tif insertPreviewSQL != \"\" {\n\t\t\tmessage += \"\\n\\nInsert command preview:\\n\" + truncateUploadCommand(insertPreviewSQL, 3000)\n\t\t}\n\t\twriteError(w, http.StatusBadGateway, message)\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"schema.upload.ingest\",\n\t\tUsername:     strPtr(session.ClickhouseUser),\n\t\tConnectionID: strPtr(session.ConnectionID),\n\t\tDetails:      strPtr(fmt.Sprintf(\"file=%s format=%s target=%s.%s rows=%d created_table=%t\", filename, format, dbName, tableName, rowsInserted, createdTable)),\n\t\tIPAddress:    strPtr(r.RemoteAddr),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\":       true,\n\t\t\"database\":      dbName,\n\t\t\"table\":         tableName,\n\t\t\"rows_inserted\": rowsInserted,\n\t\t\"created_table\": createdTable,\n\t\t\"commands\": map[string]string{\n\t\t\t\"create_table\": createTableSQL,\n\t\t\t\"insert\":       insertPreviewSQL,\n\t\t},\n\t})\n}\n\nfunc readUploadFile(w http.ResponseWriter, r *http.Request) (filename, format string, payload []byte, err error) {\n\tr.Body = http.MaxBytesReader(w, r.Body, maxUploadBytes+1024*64)\n\tif err := r.ParseMultipartForm(maxUploadBytes); err != nil {\n\t\treturn \"\", \"\", nil, fmt.Errorf(\"invalid upload form: %w\", err)\n\t}\n\tif r.MultipartForm != nil {\n\t\tdefer r.MultipartForm.RemoveAll()\n\t}\n\n\tfile, header, err := r.FormFile(\"file\")\n\tif err != nil {\n\t\treturn \"\", \"\", nil, errors.New(\"file is required\")\n\t}\n\tdefer file.Close()\n\n\tfilename = strings.TrimSpace(header.Filename)\n\tif filename == \"\" {\n\t\tfilename = \"upload\"\n\t}\n\n\tformat, err = detectUploadFormat(filename, r.FormValue(\"format\"))\n\tif 
err != nil {\n\t\treturn \"\", \"\", nil, err\n\t}\n\n\treader := io.LimitReader(file, maxUploadBytes+1)\n\tpayload, err = io.ReadAll(reader)\n\tif err != nil {\n\t\treturn \"\", \"\", nil, fmt.Errorf(\"failed to read uploaded file: %w\", err)\n\t}\n\tif len(payload) == 0 {\n\t\treturn \"\", \"\", nil, errors.New(\"uploaded file is empty\")\n\t}\n\tif len(payload) > maxUploadBytes {\n\t\treturn \"\", \"\", nil, fmt.Errorf(\"file exceeds %d MB limit\", maxUploadBytes/(1024*1024))\n\t}\n\n\treturn filename, format, payload, nil\n}\n\nfunc detectUploadFormat(filename string, explicit string) (string, error) {\n\tif explicit != \"\" {\n\t\tswitch strings.ToLower(strings.TrimSpace(explicit)) {\n\t\tcase \"csv\", \"parquet\", \"json\", \"jsonl\":\n\t\t\treturn strings.ToLower(strings.TrimSpace(explicit)), nil\n\t\tdefault:\n\t\t\treturn \"\", errors.New(\"unsupported format: use csv, parquet, json, or jsonl\")\n\t\t}\n\t}\n\n\tswitch strings.ToLower(filepath.Ext(filename)) {\n\tcase \".csv\":\n\t\treturn \"csv\", nil\n\tcase \".parquet\":\n\t\treturn \"parquet\", nil\n\tcase \".json\":\n\t\treturn \"json\", nil\n\tcase \".jsonl\":\n\t\treturn \"jsonl\", nil\n\tdefault:\n\t\treturn \"\", errors.New(\"unsupported file type: only csv, parquet, json, and jsonl are allowed\")\n\t}\n}\n\nfunc parseUploadDataset(format string, payload []byte) (parsedUploadDataset, error) {\n\tswitch format {\n\tcase \"csv\":\n\t\treturn parseCSVDataset(payload)\n\tcase \"json\":\n\t\treturn parseJSONDataset(payload)\n\tcase \"jsonl\":\n\t\treturn parseJSONLinesDataset(payload)\n\tcase \"parquet\":\n\t\treturn parseParquetDataset(payload)\n\tdefault:\n\t\treturn parsedUploadDataset{}, errors.New(\"unsupported file type: only csv, parquet, json, and jsonl are allowed\")\n\t}\n}\n\nfunc parseCSVDataset(payload []byte) (parsedUploadDataset, error) {\n\treader := csv.NewReader(bytes.NewReader(payload))\n\treader.FieldsPerRecord = -1\n\treader.ReuseRecord = false\n\n\theader, err := 
reader.Read()\n\tif err != nil {\n\t\treturn parsedUploadDataset{}, errors.New(\"invalid csv file: missing header row\")\n\t}\n\theaders := normalizeCSVHeaders(header)\n\tif len(headers) == 0 {\n\t\treturn parsedUploadDataset{}, errors.New(\"invalid csv file: no columns found\")\n\t}\n\n\trows := make([]map[string]interface{}, 0, 512)\n\tfor {\n\t\trecord, err := reader.Read()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn parsedUploadDataset{}, fmt.Errorf(\"invalid csv row: %w\", err)\n\t\t}\n\n\t\trow := make(map[string]interface{}, len(headers))\n\t\tfor i, col := range headers {\n\t\t\tif i >= len(record) {\n\t\t\t\trow[col] = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvalue := strings.TrimSpace(record[i])\n\t\t\tif value == \"\" {\n\t\t\t\trow[col] = nil\n\t\t\t} else {\n\t\t\t\trow[col] = value\n\t\t\t}\n\t\t}\n\t\trows = append(rows, row)\n\t}\n\n\treturn parsedUploadDataset{\n\t\tRows:        rows,\n\t\tColumnOrder: headers,\n\t}, nil\n}\n\nfunc parseJSONDataset(payload []byte) (parsedUploadDataset, error) {\n\tvar raw interface{}\n\tif err := json.Unmarshal(payload, &raw); err != nil {\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"invalid json file: %w\", err)\n\t}\n\n\trows := make([]map[string]interface{}, 0, 512)\n\tswitch value := raw.(type) {\n\tcase []interface{}:\n\t\tfor _, item := range value {\n\t\t\trows = append(rows, normalizeRowFromAny(item))\n\t\t}\n\tcase map[string]interface{}:\n\t\tif dataField, ok := value[\"data\"]; ok {\n\t\t\tif arr, ok := dataField.([]interface{}); ok {\n\t\t\t\tfor _, item := range arr {\n\t\t\t\t\trows = append(rows, normalizeRowFromAny(item))\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\trows = append(rows, normalizeRowFromAny(value))\n\t\t\t}\n\t\t} else {\n\t\t\trows = append(rows, normalizeRowFromAny(value))\n\t\t}\n\tdefault:\n\t\trows = append(rows, normalizeRowFromAny(value))\n\t}\n\n\treturn parsedUploadDataset{\n\t\tRows: rows,\n\t}, nil\n}\n\nfunc parseJSONLinesDataset(payload []byte) 
(parsedUploadDataset, error) {\n\tscanner := bufio.NewScanner(bytes.NewReader(payload))\n\tscanner.Buffer(make([]byte, 0, 64*1024), 10*1024*1024)\n\n\trows := make([]map[string]interface{}, 0, 512)\n\tlineNo := 0\n\tfor scanner.Scan() {\n\t\tlineNo++\n\t\tline := strings.TrimSpace(scanner.Text())\n\t\tif line == \"\" {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar raw interface{}\n\t\tif err := json.Unmarshal([]byte(line), &raw); err != nil {\n\t\t\treturn parsedUploadDataset{}, fmt.Errorf(\"invalid jsonl at line %d: %w\", lineNo, err)\n\t\t}\n\t\trows = append(rows, normalizeRowFromAny(raw))\n\t}\n\tif err := scanner.Err(); err != nil {\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to read jsonl: %w\", err)\n\t}\n\n\treturn parsedUploadDataset{\n\t\tRows: rows,\n\t}, nil\n}\n\nfunc parseParquetDataset(payload []byte) (parsedUploadDataset, error) {\n\ttmp, err := os.CreateTemp(\"\", \"ch-ui-upload-*.parquet\")\n\tif err != nil {\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to create temp file for parquet: %w\", err)\n\t}\n\ttmpPath := tmp.Name()\n\tdefer os.Remove(tmpPath)\n\n\t// Ensure restrictive permissions regardless of umask\n\tif err := os.Chmod(tmpPath, 0600); err != nil {\n\t\ttmp.Close()\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to set temp file permissions: %w\", err)\n\t}\n\n\tif _, err := tmp.Write(payload); err != nil {\n\t\ttmp.Close()\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to write parquet temp file: %w\", err)\n\t}\n\tif err := tmp.Close(); err != nil {\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to finalize parquet temp file: %w\", err)\n\t}\n\n\tfr, err := local.NewLocalFileReader(tmpPath)\n\tif err != nil {\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to open parquet file: %w\", err)\n\t}\n\tdefer fr.Close()\n\n\tpr, err := reader.NewParquetReader(fr, new(interface{}), 1)\n\tif err != nil {\n\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to read parquet schema: %w\", 
err)\n\t}\n\tdefer pr.ReadStop()\n\n\ttotalRows := int(pr.GetNumRows())\n\tif totalRows == 0 {\n\t\treturn parsedUploadDataset{Rows: []map[string]interface{}{}}, nil\n\t}\n\n\trows := make([]map[string]interface{}, 0, totalRows)\n\tbatchSize := 512\n\tfor readCount := 0; readCount < totalRows; {\n\t\ttoRead := batchSize\n\t\tif totalRows-readCount < toRead {\n\t\t\ttoRead = totalRows - readCount\n\t\t}\n\t\tbatch := make([]interface{}, toRead)\n\t\tif err := pr.Read(&batch); err != nil {\n\t\t\treturn parsedUploadDataset{}, fmt.Errorf(\"failed to read parquet rows: %w\", err)\n\t\t}\n\n\t\tfor _, item := range batch {\n\t\t\trows = append(rows, normalizeRowFromAny(item))\n\t\t}\n\t\treadCount += len(batch)\n\t}\n\n\treturn parsedUploadDataset{\n\t\tRows: rows,\n\t}, nil\n}\n\nfunc normalizeCSVHeaders(header []string) []string {\n\tnormalized := make([]string, 0, len(header))\n\tused := map[string]int{}\n\tfor idx, raw := range header {\n\t\tname := strings.TrimSpace(raw)\n\t\tif idx == 0 {\n\t\t\tname = strings.TrimPrefix(name, \"\\uFEFF\")\n\t\t}\n\t\tif name == \"\" {\n\t\t\tname = fmt.Sprintf(\"column_%d\", idx+1)\n\t\t}\n\n\t\tbase := name\n\t\tcounter := used[base]\n\t\tif counter > 0 {\n\t\t\tname = fmt.Sprintf(\"%s_%d\", base, counter+1)\n\t\t}\n\t\tused[base] = counter + 1\n\t\tnormalized = append(normalized, name)\n\t}\n\treturn normalized\n}\n\nfunc normalizeRowFromAny(raw interface{}) map[string]interface{} {\n\tswitch value := raw.(type) {\n\tcase map[string]interface{}:\n\t\trow := make(map[string]interface{}, len(value))\n\t\tfor k, v := range value {\n\t\t\trow[strings.TrimSpace(k)] = normalizeUploadValue(v)\n\t\t}\n\t\treturn row\n\tcase map[interface{}]interface{}:\n\t\trow := make(map[string]interface{}, len(value))\n\t\tfor k, v := range value {\n\t\t\trow[fmt.Sprint(k)] = normalizeUploadValue(v)\n\t\t}\n\t\treturn row\n\tdefault:\n\t\t// Fallback for struct or scalar payloads.\n\t\tasMap := map[string]interface{}{}\n\t\trawJSON, err := 
json.Marshal(raw)\n\t\tif err == nil {\n\t\t\tif json.Unmarshal(rawJSON, &asMap) == nil && len(asMap) > 0 {\n\t\t\t\trow := make(map[string]interface{}, len(asMap))\n\t\t\t\tfor k, v := range asMap {\n\t\t\t\t\trow[strings.TrimSpace(k)] = normalizeUploadValue(v)\n\t\t\t\t}\n\t\t\t\treturn row\n\t\t\t}\n\t\t}\n\t\treturn map[string]interface{}{\"value\": normalizeUploadValue(raw)}\n\t}\n}\n\nfunc normalizeUploadValue(value interface{}) interface{} {\n\tswitch v := value.(type) {\n\tcase nil:\n\t\treturn nil\n\tcase bool:\n\t\treturn v\n\tcase string:\n\t\ttrimmed := strings.TrimSpace(v)\n\t\tif trimmed == \"\" {\n\t\t\treturn nil\n\t\t}\n\t\treturn trimmed\n\tcase json.Number:\n\t\tif i, err := v.Int64(); err == nil {\n\t\t\treturn i\n\t\t}\n\t\tif f, err := v.Float64(); err == nil {\n\t\t\treturn f\n\t\t}\n\t\treturn v.String()\n\tcase float32:\n\t\treturn float64(v)\n\tcase float64:\n\t\tif math.IsNaN(v) || math.IsInf(v, 0) {\n\t\t\treturn nil\n\t\t}\n\t\treturn v\n\tcase int:\n\t\treturn int64(v)\n\tcase int8:\n\t\treturn int64(v)\n\tcase int16:\n\t\treturn int64(v)\n\tcase int32:\n\t\treturn int64(v)\n\tcase int64:\n\t\treturn v\n\tcase uint:\n\t\treturn int64(v)\n\tcase uint8:\n\t\treturn int64(v)\n\tcase uint16:\n\t\treturn int64(v)\n\tcase uint32:\n\t\treturn int64(v)\n\tcase uint64:\n\t\tif v > math.MaxInt64 {\n\t\t\treturn fmt.Sprint(v)\n\t\t}\n\t\treturn int64(v)\n\tcase time.Time:\n\t\treturn v.UTC().Format(time.RFC3339Nano)\n\tcase []interface{}, map[string]interface{}:\n\t\traw, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\treturn fmt.Sprint(v)\n\t\t}\n\t\treturn string(raw)\n\tdefault:\n\t\traw, err := json.Marshal(v)\n\t\tif err != nil {\n\t\t\treturn fmt.Sprint(v)\n\t\t}\n\t\tif len(raw) > 0 && raw[0] == '{' {\n\t\t\treturn string(raw)\n\t\t}\n\t\tvar scalar interface{}\n\t\tif json.Unmarshal(raw, &scalar) == nil {\n\t\t\treturn normalizeUploadValue(scalar)\n\t\t}\n\t\treturn string(raw)\n\t}\n}\n\nfunc inferUploadColumns(dataset 
parsedUploadDataset) []uploadDiscoveredColumn {\n\torder := make([]string, 0, len(dataset.ColumnOrder))\n\tseen := map[string]struct{}{}\n\tfor _, name := range dataset.ColumnOrder {\n\t\ttrimmed := strings.TrimSpace(name)\n\t\tif trimmed == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif _, exists := seen[trimmed]; exists {\n\t\t\tcontinue\n\t\t}\n\t\tseen[trimmed] = struct{}{}\n\t\torder = append(order, trimmed)\n\t}\n\n\textras := make([]string, 0)\n\tfor _, row := range dataset.Rows {\n\t\tfor key := range row {\n\t\t\ttrimmed := strings.TrimSpace(key)\n\t\t\tif trimmed == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif _, exists := seen[trimmed]; exists {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tseen[trimmed] = struct{}{}\n\t\t\textras = append(extras, trimmed)\n\t\t}\n\t}\n\tsort.Strings(extras)\n\torder = append(order, extras...)\n\n\tcolumns := make([]uploadDiscoveredColumn, 0, len(order))\n\tfor _, name := range order {\n\t\tvalues := make([]interface{}, 0, len(dataset.Rows))\n\t\tsample := \"\"\n\t\tfor _, row := range dataset.Rows {\n\t\t\tv, ok := row[name]\n\t\t\tif !ok {\n\t\t\t\tvalues = append(values, nil)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tvalues = append(values, v)\n\t\t\tif sample == \"\" && v != nil {\n\t\t\t\tsample = fmt.Sprint(v)\n\t\t\t}\n\t\t}\n\t\tbaseType, nullable := inferUploadColumnType(values)\n\t\tcolumnType := baseType\n\t\tif nullable {\n\t\t\tcolumnType = fmt.Sprintf(\"Nullable(%s)\", baseType)\n\t\t}\n\t\tcolumns = append(columns, uploadDiscoveredColumn{\n\t\t\tName:     name,\n\t\t\tType:     columnType,\n\t\t\tNullable: nullable,\n\t\t\tSample:   sample,\n\t\t})\n\t}\n\treturn columns\n}\n\nfunc inferUploadColumnType(values []interface{}) (baseType string, nullable bool) {\n\tallBool := true\n\tallInt := true\n\tallFloat := true\n\tallDate := true\n\tallDateTime := true\n\thasValue := false\n\n\tfor _, raw := range values {\n\t\tif raw == nil {\n\t\t\tnullable = true\n\t\t\tcontinue\n\t\t}\n\n\t\tswitch v := raw.(type) {\n\t\tcase 
bool:\n\t\t\thasValue = true\n\t\t\tallInt = false\n\t\t\tallFloat = false\n\t\t\tallDate = false\n\t\t\tallDateTime = false\n\t\tcase int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:\n\t\t\thasValue = true\n\t\t\tallBool = false\n\t\t\tallDate = false\n\t\t\tallDateTime = false\n\t\tcase float32:\n\t\t\thasValue = true\n\t\t\tallBool = false\n\t\t\tallDate = false\n\t\t\tallDateTime = false\n\t\t\tif math.Trunc(float64(v)) != float64(v) {\n\t\t\t\tallInt = false\n\t\t\t}\n\t\tcase float64:\n\t\t\thasValue = true\n\t\t\tallBool = false\n\t\t\tallDate = false\n\t\t\tallDateTime = false\n\t\t\tif math.Trunc(v) != v {\n\t\t\t\tallInt = false\n\t\t\t}\n\t\tcase string:\n\t\t\ttrimmed := strings.TrimSpace(v)\n\t\t\tif trimmed == \"\" {\n\t\t\t\tnullable = true\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\thasValue = true\n\t\t\tif _, err := strconv.ParseBool(trimmed); err != nil && trimmed != \"0\" && trimmed != \"1\" {\n\t\t\t\tallBool = false\n\t\t\t}\n\t\t\tif _, err := strconv.ParseInt(trimmed, 10, 64); err != nil {\n\t\t\t\tallInt = false\n\t\t\t}\n\t\t\tif _, err := strconv.ParseFloat(trimmed, 64); err != nil {\n\t\t\t\tallFloat = false\n\t\t\t}\n\t\t\tif _, err := time.Parse(\"2006-01-02\", trimmed); err != nil {\n\t\t\t\tallDate = false\n\t\t\t}\n\t\t\tif !isDateTimeString(trimmed) {\n\t\t\t\tallDateTime = false\n\t\t\t}\n\t\tdefault:\n\t\t\thasValue = true\n\t\t\tallBool = false\n\t\t\tallInt = false\n\t\t\tallFloat = false\n\t\t\tallDate = false\n\t\t\tallDateTime = false\n\t\t}\n\t}\n\n\tif !hasValue {\n\t\treturn \"String\", true\n\t}\n\tswitch {\n\tcase allBool:\n\t\treturn \"Bool\", nullable\n\tcase allInt:\n\t\treturn \"Int64\", nullable\n\tcase allFloat:\n\t\treturn \"Float64\", nullable\n\tcase allDateTime:\n\t\treturn \"DateTime\", nullable\n\tcase allDate:\n\t\treturn \"Date\", nullable\n\tdefault:\n\t\treturn \"String\", nullable\n\t}\n}\n\nfunc isDateTimeString(value string) bool {\n\tlayouts := 
[]string{\n\t\ttime.RFC3339,\n\t\ttime.RFC3339Nano,\n\t\t\"2006-01-02 15:04:05\",\n\t\t\"2006-01-02 15:04:05.000\",\n\t\t\"2006-01-02T15:04:05\",\n\t\t\"2006-01-02T15:04:05.000\",\n\t}\n\tfor _, layout := range layouts {\n\t\tif _, err := time.Parse(layout, value); err == nil {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\nfunc parseUploadColumnsForm(raw string) ([]uploadDiscoveredColumn, error) {\n\ttrimmed := strings.TrimSpace(raw)\n\tif trimmed == \"\" {\n\t\treturn nil, nil\n\t}\n\tvar cols []uploadDiscoveredColumn\n\tif err := json.Unmarshal([]byte(trimmed), &cols); err != nil {\n\t\treturn nil, errors.New(\"invalid columns payload\")\n\t}\n\tresult := make([]uploadDiscoveredColumn, 0, len(cols))\n\tfor idx, col := range cols {\n\t\tname := strings.TrimSpace(col.Name)\n\t\tcolType := strings.TrimSpace(col.Type)\n\t\tif err := validateSimpleObjectName(name, fmt.Sprintf(\"column #%d\", idx+1)); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif colType == \"\" || isUnsafeSQLFragment(colType) {\n\t\t\treturn nil, fmt.Errorf(\"invalid type for column %q\", name)\n\t\t}\n\t\tresult = append(result, uploadDiscoveredColumn{\n\t\t\tName: name,\n\t\t\tType: colType,\n\t\t})\n\t}\n\treturn result, nil\n}\n\nfunc parseMultipartBool(raw string, defaultValue bool) bool {\n\ttrimmed := strings.TrimSpace(strings.ToLower(raw))\n\tif trimmed == \"\" {\n\t\treturn defaultValue\n\t}\n\tswitch trimmed {\n\tcase \"1\", \"true\", \"yes\", \"on\":\n\t\treturn true\n\tcase \"0\", \"false\", \"no\", \"off\":\n\t\treturn false\n\tdefault:\n\t\treturn defaultValue\n\t}\n}\n\nfunc boolPtr(value bool) *bool {\n\treturn &value\n}\n\nfunc buildCreateTableSQL(req createTableRequest) (string, error) {\n\tdbName := strings.TrimSpace(req.Database)\n\ttableName := strings.TrimSpace(req.Name)\n\tif err := validateSimpleObjectName(dbName, \"database\"); err != nil {\n\t\treturn \"\", err\n\t}\n\tif err := validateSimpleObjectName(tableName, \"table\"); err != nil {\n\t\treturn \"\", 
err\n\t}\n\tif isSystemDatabaseName(dbName) {\n\t\treturn \"\", errors.New(\"cannot create tables in system databases\")\n\t}\n\tif len(req.Columns) == 0 {\n\t\treturn \"\", errors.New(\"at least one column is required\")\n\t}\n\n\tengine := strings.TrimSpace(req.Engine)\n\tif engine == \"\" {\n\t\tengine = \"MergeTree\"\n\t}\n\tif isUnsafeSQLFragment(engine) {\n\t\treturn \"\", errors.New(\"invalid engine expression\")\n\t}\n\n\tcluster := strings.TrimSpace(req.OnCluster)\n\tif cluster != \"\" && isUnsafeSQLFragment(cluster) {\n\t\treturn \"\", errors.New(\"invalid cluster name\")\n\t}\n\n\tcolumnsSQL := make([]string, 0, len(req.Columns))\n\tfor i, col := range req.Columns {\n\t\tcolName := strings.TrimSpace(col.Name)\n\t\tcolType := strings.TrimSpace(col.Type)\n\t\tif err := validateSimpleObjectName(colName, fmt.Sprintf(\"column #%d\", i+1)); err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tif colType == \"\" || isUnsafeSQLFragment(colType) {\n\t\t\treturn \"\", fmt.Errorf(\"invalid type for column %q\", colName)\n\t\t}\n\n\t\tpart := escapeIdentifier(colName) + \" \" + colType\n\t\tif def := strings.TrimSpace(col.DefaultExpression); def != \"\" {\n\t\t\tif isUnsafeSQLFragment(def) {\n\t\t\t\treturn \"\", fmt.Errorf(\"invalid default expression for column %q\", colName)\n\t\t\t}\n\t\t\tpart += \" DEFAULT \" + def\n\t\t}\n\t\tif comment := strings.TrimSpace(col.Comment); comment != \"\" {\n\t\t\tpart += \" COMMENT '\" + escapeLiteral(comment) + \"'\"\n\t\t}\n\t\tcolumnsSQL = append(columnsSQL, part)\n\t}\n\n\torderBy := strings.TrimSpace(req.OrderBy)\n\tpartitionBy := strings.TrimSpace(req.PartitionBy)\n\tprimaryKey := strings.TrimSpace(req.PrimaryKey)\n\tsampleBy := strings.TrimSpace(req.SampleBy)\n\tttl := strings.TrimSpace(req.TTL)\n\tsettings := strings.TrimSpace(req.Settings)\n\tcomment := strings.TrimSpace(req.Comment)\n\n\texpressions := []struct {\n\t\tname  string\n\t\tvalue string\n\t}{\n\t\t{name: \"order_by\", value: orderBy},\n\t\t{name: 
\"partition_by\", value: partitionBy},\n\t\t{name: \"primary_key\", value: primaryKey},\n\t\t{name: \"sample_by\", value: sampleBy},\n\t\t{name: \"ttl\", value: ttl},\n\t\t{name: \"settings\", value: settings},\n\t}\n\tfor _, expr := range expressions {\n\t\tif expr.value != \"\" && isUnsafeSQLFragment(expr.value) {\n\t\t\treturn \"\", fmt.Errorf(\"invalid %s expression\", expr.name)\n\t\t}\n\t}\n\n\tif strings.Contains(strings.ToLower(engine), \"mergetree\") && orderBy == \"\" {\n\t\torderBy = \"tuple()\"\n\t}\n\n\tifNotExists := req.IfNotExists == nil || *req.IfNotExists\n\n\tvar sqlBuilder strings.Builder\n\tsqlBuilder.WriteString(\"CREATE TABLE \")\n\tif ifNotExists {\n\t\tsqlBuilder.WriteString(\"IF NOT EXISTS \")\n\t}\n\tsqlBuilder.WriteString(escapeIdentifier(dbName))\n\tsqlBuilder.WriteString(\".\")\n\tsqlBuilder.WriteString(escapeIdentifier(tableName))\n\tif cluster != \"\" {\n\t\tsqlBuilder.WriteString(\" ON CLUSTER \")\n\t\tsqlBuilder.WriteString(escapeIdentifier(cluster))\n\t}\n\tsqlBuilder.WriteString(\" (\\n  \")\n\tsqlBuilder.WriteString(strings.Join(columnsSQL, \",\\n  \"))\n\tsqlBuilder.WriteString(\"\\n)\")\n\tsqlBuilder.WriteString(\"\\nENGINE = \")\n\tsqlBuilder.WriteString(engine)\n\tif partitionBy != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nPARTITION BY \")\n\t\tsqlBuilder.WriteString(partitionBy)\n\t}\n\tif orderBy != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nORDER BY \")\n\t\tsqlBuilder.WriteString(orderBy)\n\t}\n\tif primaryKey != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nPRIMARY KEY \")\n\t\tsqlBuilder.WriteString(primaryKey)\n\t}\n\tif sampleBy != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nSAMPLE BY \")\n\t\tsqlBuilder.WriteString(sampleBy)\n\t}\n\tif ttl != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nTTL \")\n\t\tsqlBuilder.WriteString(ttl)\n\t}\n\tif settings != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nSETTINGS \")\n\t\tsqlBuilder.WriteString(settings)\n\t}\n\tif comment != \"\" {\n\t\tsqlBuilder.WriteString(\"\\nCOMMENT 
'\")\n\t\tsqlBuilder.WriteString(escapeLiteral(comment))\n\t\tsqlBuilder.WriteString(\"'\")\n\t}\n\n\treturn sqlBuilder.String(), nil\n}\n\nfunc (h *QueryHandler) resolveInsertColumns(\n\tsession *middleware.SessionInfo,\n\tpassword string,\n\tdatabaseName string,\n\ttableName string,\n\tdiscovered []uploadDiscoveredColumn,\n\tcreateTable bool,\n) ([]uploadInsertColumn, error) {\n\tif createTable {\n\t\tcols := make([]uploadInsertColumn, 0, len(discovered))\n\t\tfor _, col := range discovered {\n\t\t\tname := strings.TrimSpace(col.Name)\n\t\t\tcolType := strings.TrimSpace(col.Type)\n\t\t\tif name == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif colType == \"\" {\n\t\t\t\tcolType = \"String\"\n\t\t\t}\n\t\t\tcols = append(cols, uploadInsertColumn{\n\t\t\t\tName: name,\n\t\t\t\tType: colType,\n\t\t\t})\n\t\t}\n\t\treturn cols, nil\n\t}\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID,\n\t\tfmt.Sprintf(\"DESCRIBE TABLE %s.%s\", escapeIdentifier(databaseName), escapeIdentifier(tableName)),\n\t\tsession.ClickhouseUser,\n\t\tpassword,\n\t\t20*time.Second,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to read table schema: %w\", err)\n\t}\n\n\trows := decodeRows(result.Data)\n\ttableCols := make([]uploadInsertColumn, 0, len(rows))\n\tfor _, row := range rows {\n\t\trawName, ok := row[\"name\"]\n\t\tif !ok {\n\t\t\tcontinue\n\t\t}\n\t\tcolName := strings.TrimSpace(fmt.Sprint(rawName))\n\t\tif colName == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tcolType := strings.TrimSpace(fmt.Sprint(row[\"type\"]))\n\t\tif colType == \"\" {\n\t\t\tcolType = \"String\"\n\t\t}\n\t\ttableCols = append(tableCols, uploadInsertColumn{\n\t\t\tName: colName,\n\t\t\tType: colType,\n\t\t})\n\t}\n\tif len(tableCols) == 0 {\n\t\treturn nil, errors.New(\"target table has no columns\")\n\t}\n\n\tbyLower := make(map[string]uploadInsertColumn, len(tableCols))\n\tfor _, col := range tableCols {\n\t\tbyLower[strings.ToLower(col.Name)] = col\n\t}\n\n\tselected := 
make([]uploadInsertColumn, 0, len(discovered))\n\tused := map[string]struct{}{}\n\tfor _, col := range discovered {\n\t\tname := strings.TrimSpace(col.Name)\n\t\tif name == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tvar matched uploadInsertColumn\n\t\tfor _, tableCol := range tableCols {\n\t\t\tif tableCol.Name == name {\n\t\t\t\tmatched = tableCol\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif matched.Name == \"\" {\n\t\t\tif value, ok := byLower[strings.ToLower(name)]; ok {\n\t\t\t\tmatched = value\n\t\t\t}\n\t\t}\n\t\tif matched.Name == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif _, exists := used[matched.Name]; exists {\n\t\t\tcontinue\n\t\t}\n\t\tused[matched.Name] = struct{}{}\n\t\tselected = append(selected, matched)\n\t}\n\n\tif len(selected) == 0 {\n\t\treturn nil, errors.New(\"no matching columns between uploaded file and target table\")\n\t}\n\n\treturn selected, nil\n}\n\nfunc (h *QueryHandler) insertJSONEachRowBatches(\n\tsession *middleware.SessionInfo,\n\tpassword string,\n\tdatabaseName string,\n\ttableName string,\n\ttargetColumns []uploadInsertColumn,\n\trows []map[string]interface{},\n) (int, string, error) {\n\tif len(rows) == 0 {\n\t\treturn 0, \"\", nil\n\t}\n\tconst batchSize = 500\n\tinserted := 0\n\tinsertPreview := \"\"\n\n\tfor start := 0; start < len(rows); start += batchSize {\n\t\tend := start + batchSize\n\t\tif end > len(rows) {\n\t\t\tend = len(rows)\n\t\t}\n\t\tquery, rowCount, err := buildJSONEachRowInsertQuery(databaseName, tableName, targetColumns, rows[start:end], start)\n\t\tif err != nil {\n\t\t\treturn inserted, insertPreview, err\n\t\t}\n\t\tif rowCount == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tif insertPreview == \"\" {\n\t\t\tinsertPreview = query\n\t\t}\n\n\t\tif _, err := h.Gateway.ExecuteQuery(\n\t\t\tsession.ConnectionID,\n\t\t\tquery,\n\t\t\tsession.ClickhouseUser,\n\t\t\tpassword,\n\t\t\t90*time.Second,\n\t\t); err != nil {\n\t\t\treturn inserted, insertPreview, fmt.Errorf(\"insert failed after %d rows: %s\", inserted, 
humanizeUploadInsertError(err.Error()))\n\t\t}\n\t\tinserted += rowCount\n\t}\n\n\treturn inserted, insertPreview, nil\n}\n\nfunc buildJSONEachRowInsertQuery(\n\tdatabaseName string,\n\ttableName string,\n\ttargetColumns []uploadInsertColumn,\n\tsourceRows []map[string]interface{},\n\tbaseRowOffset int,\n) (string, int, error) {\n\tif len(targetColumns) == 0 {\n\t\treturn \"\", 0, errors.New(\"insert requires at least one column\")\n\t}\n\n\tseen := map[string]struct{}{}\n\tcolumns := make([]uploadInsertColumn, 0, len(targetColumns))\n\tfor _, col := range targetColumns {\n\t\tname := strings.TrimSpace(col.Name)\n\t\tif name == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif _, exists := seen[name]; exists {\n\t\t\tcontinue\n\t\t}\n\t\tseen[name] = struct{}{}\n\t\ttyp := strings.TrimSpace(col.Type)\n\t\tif typ == \"\" {\n\t\t\ttyp = \"String\"\n\t\t}\n\t\tcolumns = append(columns, uploadInsertColumn{Name: name, Type: typ})\n\t}\n\tif len(columns) == 0 {\n\t\treturn \"\", 0, errors.New(\"insert requires at least one valid column\")\n\t}\n\n\tvar builder strings.Builder\n\tbuilder.WriteString(\"INSERT INTO \")\n\tbuilder.WriteString(escapeIdentifier(databaseName))\n\tbuilder.WriteString(\".\")\n\tbuilder.WriteString(escapeIdentifier(tableName))\n\tbuilder.WriteString(\" (\")\n\tfor idx, col := range columns {\n\t\tif idx > 0 {\n\t\t\tbuilder.WriteString(\", \")\n\t\t}\n\t\tbuilder.WriteString(escapeIdentifier(col.Name))\n\t}\n\tbuilder.WriteString(\") FORMAT JSONEachRow\\n\")\n\n\tinserted := 0\n\tfor rowIdx, source := range sourceRows {\n\t\trow := make(map[string]interface{}, len(columns))\n\t\thasData := false\n\t\tfor _, col := range columns {\n\t\t\tval, ok := source[col.Name]\n\t\t\tif !ok {\n\t\t\t\trow[col.Name] = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tcoerced, err := coerceUploadValueForType(val, col.Type)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", inserted, fmt.Errorf(\"row %d column %q: %w\", baseRowOffset+rowIdx+1, col.Name, err)\n\t\t\t}\n\t\t\trow[col.Name] = 
coerced\n\t\t\tif coerced != nil {\n\t\t\t\thasData = true\n\t\t\t}\n\t\t}\n\t\tif !hasData {\n\t\t\tcontinue\n\t\t}\n\t\tline, err := json.Marshal(row)\n\t\tif err != nil {\n\t\t\treturn \"\", inserted, fmt.Errorf(\"failed to encode row for insert: %w\", err)\n\t\t}\n\t\tbuilder.Write(line)\n\t\tbuilder.WriteByte('\\n')\n\t\tinserted++\n\t}\n\n\treturn builder.String(), inserted, nil\n}\n\nfunc humanizeUploadInsertError(message string) string {\n\tmsg := strings.TrimSpace(message)\n\tlower := strings.ToLower(msg)\n\tif strings.Contains(lower, \"cannot parse input\") {\n\t\treturn msg + \" Hint: adjust discovered column types or normalize date/time formats before upload.\"\n\t}\n\treturn msg\n}\n\nfunc truncateUploadCommand(sql string, limit int) string {\n\tif limit <= 0 || len(sql) <= limit {\n\t\treturn sql\n\t}\n\treturn sql[:limit] + \"\\n... (truncated)\"\n}\n\nfunc coerceUploadValueForType(value interface{}, typeExpr string) (interface{}, error) {\n\tif value == nil {\n\t\treturn nil, nil\n\t}\n\tbaseType := normalizeClickHouseType(typeExpr)\n\tif baseType == \"\" {\n\t\tbaseType = \"STRING\"\n\t}\n\n\tswitch {\n\tcase strings.Contains(baseType, \"BOOL\"):\n\t\tparsed, err := parseBoolUploadValue(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn parsed, nil\n\tcase strings.Contains(baseType, \"INT\"):\n\t\tparsed, err := parseIntUploadValue(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn parsed, nil\n\tcase strings.Contains(baseType, \"FLOAT\") || strings.Contains(baseType, \"DECIMAL\"):\n\t\tparsed, err := parseFloatUploadValue(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn parsed, nil\n\tcase strings.Contains(baseType, \"DATE\") && strings.Contains(baseType, \"TIME\"):\n\t\tparsed, err := parseDateTimeUploadValue(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn parsed, nil\n\tcase strings.HasPrefix(baseType, \"DATE\"):\n\t\tparsed, err := 
parseDateUploadValue(value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn parsed, nil\n\tdefault:\n\t\treturn normalizeUploadValue(value), nil\n\t}\n}\n\nfunc normalizeClickHouseType(typeExpr string) string {\n\ttrimmed := strings.TrimSpace(typeExpr)\n\tif trimmed == \"\" {\n\t\treturn \"\"\n\t}\n\tupper := strings.ToUpper(trimmed)\n\tchanged := true\n\tfor changed {\n\t\tchanged = false\n\t\tfor _, wrapper := range []string{\"NULLABLE(\", \"LOWCARDINALITY(\"} {\n\t\t\tif strings.HasPrefix(upper, wrapper) && strings.HasSuffix(upper, \")\") {\n\t\t\t\tupper = strings.TrimSuffix(strings.TrimPrefix(upper, wrapper), \")\")\n\t\t\t\tchanged = true\n\t\t\t}\n\t\t}\n\t}\n\treturn strings.TrimSpace(upper)\n}\n\nfunc parseBoolUploadValue(value interface{}) (bool, error) {\n\tswitch v := value.(type) {\n\tcase bool:\n\t\treturn v, nil\n\tcase string:\n\t\ttrimmed := strings.TrimSpace(strings.ToLower(v))\n\t\tswitch trimmed {\n\t\tcase \"1\", \"true\", \"yes\", \"on\":\n\t\t\treturn true, nil\n\t\tcase \"0\", \"false\", \"no\", \"off\":\n\t\t\treturn false, nil\n\t\tdefault:\n\t\t\treturn false, fmt.Errorf(\"cannot parse %q as Bool\", v)\n\t\t}\n\tcase int64:\n\t\treturn v != 0, nil\n\tcase float64:\n\t\treturn v != 0, nil\n\tdefault:\n\t\tnormalized := normalizeUploadValue(value)\n\t\tif s, ok := normalized.(string); ok {\n\t\t\treturn parseBoolUploadValue(s)\n\t\t}\n\t\treturn false, fmt.Errorf(\"cannot parse %T as Bool\", value)\n\t}\n}\n\nfunc parseIntUploadValue(value interface{}) (int64, error) {\n\tswitch v := value.(type) {\n\tcase int64:\n\t\treturn v, nil\n\tcase int:\n\t\treturn int64(v), nil\n\tcase float64:\n\t\tif math.Trunc(v) != v {\n\t\t\treturn 0, fmt.Errorf(\"cannot parse non-integer %v as Int64\", v)\n\t\t}\n\t\treturn int64(v), nil\n\tcase string:\n\t\ttrimmed := strings.TrimSpace(v)\n\t\tif trimmed == \"\" {\n\t\t\treturn 0, errors.New(\"empty value\")\n\t\t}\n\t\ti, err := strconv.ParseInt(trimmed, 10, 64)\n\t\tif err != nil 
{\n\t\t\treturn 0, fmt.Errorf(\"cannot parse %q as Int64\", v)\n\t\t}\n\t\treturn i, nil\n\tdefault:\n\t\tnormalized := normalizeUploadValue(value)\n\t\tswitch nv := normalized.(type) {\n\t\tcase int64:\n\t\t\treturn nv, nil\n\t\tcase float64:\n\t\t\tif math.Trunc(nv) != nv {\n\t\t\t\treturn 0, fmt.Errorf(\"cannot parse non-integer %v as Int64\", nv)\n\t\t\t}\n\t\t\treturn int64(nv), nil\n\t\tcase string:\n\t\t\treturn parseIntUploadValue(nv)\n\t\tdefault:\n\t\t\treturn 0, fmt.Errorf(\"cannot parse %T as Int64\", value)\n\t\t}\n\t}\n}\n\nfunc parseFloatUploadValue(value interface{}) (float64, error) {\n\tswitch v := value.(type) {\n\tcase float64:\n\t\treturn v, nil\n\tcase float32:\n\t\treturn float64(v), nil\n\tcase int64:\n\t\treturn float64(v), nil\n\tcase int:\n\t\treturn float64(v), nil\n\tcase string:\n\t\ttrimmed := strings.TrimSpace(v)\n\t\tif trimmed == \"\" {\n\t\t\treturn 0, errors.New(\"empty value\")\n\t\t}\n\t\tf, err := strconv.ParseFloat(trimmed, 64)\n\t\tif err != nil {\n\t\t\treturn 0, fmt.Errorf(\"cannot parse %q as Float64\", v)\n\t\t}\n\t\treturn f, nil\n\tdefault:\n\t\tnormalized := normalizeUploadValue(value)\n\t\tswitch nv := normalized.(type) {\n\t\tcase float64:\n\t\t\treturn nv, nil\n\t\tcase int64:\n\t\t\treturn float64(nv), nil\n\t\tcase string:\n\t\t\treturn parseFloatUploadValue(nv)\n\t\tdefault:\n\t\t\treturn 0, fmt.Errorf(\"cannot parse %T as Float64\", value)\n\t\t}\n\t}\n}\n\nfunc parseDateUploadValue(value interface{}) (string, error) {\n\tt, err := parseFlexibleTime(value)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn t.Format(\"2006-01-02\"), nil\n}\n\nfunc parseDateTimeUploadValue(value interface{}) (string, error) {\n\tt, err := parseFlexibleTime(value)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn t.Format(\"2006-01-02 15:04:05\"), nil\n}\n\nfunc parseFlexibleTime(value interface{}) (time.Time, error) {\n\tswitch v := value.(type) {\n\tcase time.Time:\n\t\treturn v.UTC(), nil\n\tcase string:\n\t\ttrimmed 
:= strings.TrimSpace(v)\n\t\tif trimmed == \"\" {\n\t\t\treturn time.Time{}, errors.New(\"empty time value\")\n\t\t}\n\t\tlayouts := []string{\n\t\t\ttime.RFC3339,\n\t\t\ttime.RFC3339Nano,\n\t\t\t\"2006-01-02 15:04:05\",\n\t\t\t\"2006-01-02 15:04:05.000\",\n\t\t\t\"2006-01-02T15:04:05\",\n\t\t\t\"2006-01-02T15:04:05.000\",\n\t\t\t\"2006-01-02\",\n\t\t}\n\t\tfor _, layout := range layouts {\n\t\t\tif t, err := time.Parse(layout, trimmed); err == nil {\n\t\t\t\treturn t.UTC(), nil\n\t\t\t}\n\t\t}\n\t\treturn time.Time{}, fmt.Errorf(\"cannot parse %q as Date/DateTime\", v)\n\tdefault:\n\t\tnormalized := normalizeUploadValue(value)\n\t\tif s, ok := normalized.(string); ok {\n\t\t\treturn parseFlexibleTime(s)\n\t\t}\n\t\treturn time.Time{}, fmt.Errorf(\"cannot parse %T as Date/DateTime\", value)\n\t}\n}\n"
  },
  {
    "path": "internal/server/handlers/saved_queries.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n)\n\n// SavedQueriesHandler handles saved query CRUD operations.\ntype SavedQueriesHandler struct {\n\tDB *database.DB\n}\n\n// Routes registers saved query routes on the given router.\nfunc (h *SavedQueriesHandler) Routes(r chi.Router) {\n\tr.Get(\"/\", h.List)\n\tr.Get(\"/{id}\", h.Get)\n\tr.Post(\"/\", h.Create)\n\tr.Put(\"/{id}\", h.Update)\n\tr.Delete(\"/{id}\", h.Delete)\n\tr.Post(\"/{id}/duplicate\", h.Duplicate)\n}\n\n// List returns all saved queries.\nfunc (h *SavedQueriesHandler) List(w http.ResponseWriter, r *http.Request) {\n\tqueries, err := h.DB.GetSavedQueries()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list saved queries\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch saved queries\"})\n\t\treturn\n\t}\n\n\tif queries == nil {\n\t\tqueries = []database.SavedQuery{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"saved_queries\": queries})\n}\n\n// Get returns a single saved query by ID.\nfunc (h *SavedQueriesHandler) Get(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query ID is required\"})\n\t\treturn\n\t}\n\n\tquery, err := h.DB.GetSavedQueryByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get saved query\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch saved query\"})\n\t\treturn\n\t}\n\tif query == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Saved query not found\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, query)\n}\n\n// Create creates a new saved 
query.\nfunc (h *SavedQueriesHandler) Create(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         string `json:\"name\"`\n\t\tDescription  string `json:\"description\"`\n\t\tQuery        string `json:\"query\"`\n\t\tConnectionID string `json:\"connection_id\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tsqlQuery := strings.TrimSpace(body.Query)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is required\"})\n\t\treturn\n\t}\n\tif sqlQuery == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query is required\"})\n\t\treturn\n\t}\n\n\tconnectionID := strings.TrimSpace(body.ConnectionID)\n\tif connectionID == \"\" {\n\t\tconnectionID = session.ConnectionID\n\t}\n\n\tid, err := h.DB.CreateSavedQuery(database.CreateSavedQueryParams{\n\t\tName:         name,\n\t\tDescription:  strings.TrimSpace(body.Description),\n\t\tQuery:        sqlQuery,\n\t\tConnectionID: connectionID,\n\t\tCreatedBy:    session.ClickhouseUser,\n\t})\n\tif err != nil {\n\t\tslog.Error(\"Failed to create saved query\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create saved query\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"saved_query.created\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tcreated, err := h.DB.GetSavedQueryByID(id)\n\tif err != nil || created == nil {\n\t\twriteJSON(w, http.StatusCreated, map[string]string{\"id\": id})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusCreated, 
created)\n}\n\n// Update updates an existing saved query.\nfunc (h *SavedQueriesHandler) Update(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetSavedQueryByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get saved query for update\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch saved query\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Saved query not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         *string `json:\"name\"`\n\t\tDescription  *string `json:\"description\"`\n\t\tQuery        *string `json:\"query\"`\n\t\tConnectionID *string `json:\"connection_id\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tdescription := \"\"\n\tif existing.Description != nil {\n\t\tdescription = *existing.Description\n\t}\n\tquery := existing.Query\n\tconnectionID := \"\"\n\tif existing.ConnectionID != nil {\n\t\tconnectionID = *existing.ConnectionID\n\t}\n\n\tchanged := false\n\tif body.Name != nil {\n\t\tn := strings.TrimSpace(*body.Name)\n\t\tif n == \"\" {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name cannot be empty\"})\n\t\t\treturn\n\t\t}\n\t\tname = n\n\t\tchanged = true\n\t}\n\tif body.Description != nil {\n\t\tdescription = strings.TrimSpace(*body.Description)\n\t\tchanged = true\n\t}\n\tif body.Query != nil {\n\t\tq := 
strings.TrimSpace(*body.Query)\n\t\tif q == \"\" {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query cannot be empty\"})\n\t\t\treturn\n\t\t}\n\t\tquery = q\n\t\tchanged = true\n\t}\n\tif body.ConnectionID != nil {\n\t\tconnectionID = strings.TrimSpace(*body.ConnectionID)\n\t\tchanged = true\n\t}\n\n\tif !changed {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"No valid fields to update\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdateSavedQuery(id, name, description, query, connectionID); err != nil {\n\t\tslog.Error(\"Failed to update saved query\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update saved query\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"saved_query.updated\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tupdated, err := h.DB.GetSavedQueryByID(id)\n\tif err != nil || updated == nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, updated)\n}\n\n// Delete removes a saved query.\nfunc (h *SavedQueriesHandler) Delete(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetSavedQueryByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get saved query for delete\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch saved query\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": 
\"Saved query not found\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteSavedQuery(id); err != nil {\n\t\tslog.Error(\"Failed to delete saved query\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete saved query\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"saved_query.deleted\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(existing.Name),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// Duplicate creates a copy of an existing saved query.\nfunc (h *SavedQueriesHandler) Duplicate(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Query ID is required\"})\n\t\treturn\n\t}\n\n\toriginal, err := h.DB.GetSavedQueryByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get saved query for duplicate\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch saved query\"})\n\t\treturn\n\t}\n\tif original == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Saved query not found\"})\n\t\treturn\n\t}\n\n\tnewName := strings.TrimSpace(original.Name + \" (copy)\")\n\tdescription := \"\"\n\tif original.Description != nil {\n\t\tdescription = *original.Description\n\t}\n\tconnectionID := \"\"\n\tif original.ConnectionID != nil {\n\t\tconnectionID = *original.ConnectionID\n\t}\n\n\tnewID, err := h.DB.CreateSavedQuery(database.CreateSavedQueryParams{\n\t\tName:         newName,\n\t\tDescription:  description,\n\t\tQuery:        original.Query,\n\t\tConnectionID: connectionID,\n\t\tCreatedBy:    
session.ClickhouseUser,\n\t})\n\tif err != nil {\n\t\tslog.Error(\"Failed to duplicate saved query\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to duplicate saved query\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"saved_query.duplicated\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(\"Duplicated from \" + id),\n\t})\n\n\tduplicated, err := h.DB.GetSavedQueryByID(newID)\n\tif err != nil || duplicated == nil {\n\t\twriteJSON(w, http.StatusCreated, map[string]string{\"id\": newID})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusCreated, duplicated)\n}\n"
  },
  {
    "path": "internal/server/handlers/schedules.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/go-chi/chi/v5\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/scheduler\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// SchedulesHandler handles scheduled job CRUD and execution.\ntype SchedulesHandler struct {\n\tDB      *database.DB\n\tGateway *tunnel.Gateway\n\tConfig  *config.Config\n}\n\n// Routes registers schedule routes on the given router.\nfunc (h *SchedulesHandler) Routes(r chi.Router) {\n\tr.Get(\"/\", h.List)\n\tr.Get(\"/{id}\", h.Get)\n\tr.Post(\"/\", h.Create)\n\tr.Put(\"/{id}\", h.Update)\n\tr.Delete(\"/{id}\", h.Delete)\n\tr.Get(\"/{id}/runs\", h.ListRuns)\n\tr.Post(\"/{id}/run\", h.ManualRun)\n}\n\n// List returns all scheduled jobs.\nfunc (h *SchedulesHandler) List(w http.ResponseWriter, r *http.Request) {\n\tschedules, err := h.DB.GetSchedules()\n\tif err != nil {\n\t\tslog.Error(\"Failed to list schedules\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch schedules\"})\n\t\treturn\n\t}\n\n\tif schedules == nil {\n\t\tschedules = []database.Schedule{}\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"schedules\": schedules})\n}\n\n// Get returns a single scheduled job by ID.\nfunc (h *SchedulesHandler) Get(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Schedule ID is required\"})\n\t\treturn\n\t}\n\n\tschedule, err := h.DB.GetScheduleByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get schedule\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, 
http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch schedule\"})\n\t\treturn\n\t}\n\tif schedule == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Schedule not found\"})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, schedule)\n}\n\n// Create creates a new scheduled job.\nfunc (h *SchedulesHandler) Create(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName         string `json:\"name\"`\n\t\tCron         string `json:\"cron\"`\n\t\tSavedQueryID string `json:\"saved_query_id\"`\n\t\tConnectionID string `json:\"connection_id\"`\n\t\tTimezone     string `json:\"timezone\"`\n\t\tTimeoutMs    *int   `json:\"timeout_ms\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := strings.TrimSpace(body.Name)\n\tcronExpr := strings.TrimSpace(body.Cron)\n\tsavedQueryID := strings.TrimSpace(body.SavedQueryID)\n\tif name == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is required\"})\n\t\treturn\n\t}\n\tif cronExpr == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Cron expression is required\"})\n\t\treturn\n\t}\n\tif !scheduler.ValidateCron(cronExpr) {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid cron expression\"})\n\t\treturn\n\t}\n\tif savedQueryID == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Saved query ID is required\"})\n\t\treturn\n\t}\n\n\t// Verify saved query exists\n\tsavedQuery, err := h.DB.GetSavedQueryByID(savedQueryID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to verify saved query\", \"error\", err, \"saved_query_id\", 
savedQueryID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to verify saved query\"})\n\t\treturn\n\t}\n\tif savedQuery == nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Saved query not found\"})\n\t\treturn\n\t}\n\n\ttimezone := strings.TrimSpace(body.Timezone)\n\tif timezone == \"\" {\n\t\ttimezone = \"UTC\"\n\t}\n\n\ttimeoutMs := 60000\n\tif body.TimeoutMs != nil && *body.TimeoutMs > 0 {\n\t\ttimeoutMs = *body.TimeoutMs\n\t}\n\n\tconnectionID := strings.TrimSpace(body.ConnectionID)\n\tif connectionID == \"\" {\n\t\tif savedQuery.ConnectionID != nil {\n\t\t\tconnectionID = *savedQuery.ConnectionID\n\t\t}\n\t\tif connectionID == \"\" {\n\t\t\tconnectionID = session.ConnectionID\n\t\t}\n\t}\n\n\tid, err := h.DB.CreateSchedule(name, savedQueryID, connectionID, cronExpr, timezone, session.ClickhouseUser, timeoutMs)\n\tif err != nil {\n\t\tslog.Error(\"Failed to create schedule\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to create schedule\"})\n\t\treturn\n\t}\n\n\t// Set next run time\n\tnext := scheduler.ComputeNextRun(cronExpr, time.Now().UTC())\n\tif next != nil {\n\t\th.DB.UpdateScheduleStatus(id, \"\", \"\", next)\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"schedule.created\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\tschedule, err := h.DB.GetScheduleByID(id)\n\tif err != nil || schedule == nil {\n\t\twriteJSON(w, http.StatusCreated, map[string]string{\"id\": id})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusCreated, schedule)\n}\n\n// Update updates an existing scheduled job.\nfunc (h *SchedulesHandler) Update(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, 
\"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Schedule ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetScheduleByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get schedule for update\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch schedule\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Scheduled job not found\"})\n\t\treturn\n\t}\n\n\tvar body struct {\n\t\tName      *string `json:\"name\"`\n\t\tCron      *string `json:\"cron\"`\n\t\tTimezone  *string `json:\"timezone\"`\n\t\tEnabled   *bool   `json:\"enabled\"`\n\t\tTimeoutMs *int    `json:\"timeout_ms\"`\n\t}\n\tif err := json.NewDecoder(r.Body).Decode(&body); err != nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid request body\"})\n\t\treturn\n\t}\n\n\tname := existing.Name\n\tcron := existing.Cron\n\ttimezone := existing.Timezone\n\tenabled := existing.Enabled\n\ttimeoutMs := existing.TimeoutMs\n\n\tchanged := false\n\tif body.Name != nil {\n\t\tn := strings.TrimSpace(*body.Name)\n\t\tif n == \"\" {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Name is required\"})\n\t\t\treturn\n\t\t}\n\t\tname = n\n\t\tchanged = true\n\t}\n\tif body.Cron != nil {\n\t\tc := strings.TrimSpace(*body.Cron)\n\t\tif c == \"\" || !scheduler.ValidateCron(c) {\n\t\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Invalid cron expression\"})\n\t\t\treturn\n\t\t}\n\t\tcron = c\n\t\tchanged = true\n\t}\n\tif body.Timezone != nil {\n\t\ttz := strings.TrimSpace(*body.Timezone)\n\t\tif tz == \"\" {\n\t\t\ttz = \"UTC\"\n\t\t}\n\t\ttimezone = tz\n\t\tchanged = true\n\t}\n\tif body.Enabled != nil {\n\t\tenabled = *body.Enabled\n\t\tchanged = true\n\t}\n\tif body.TimeoutMs != nil {\n\t\tif *body.TimeoutMs <= 0 {\n\t\t\twriteJSON(w, 
http.StatusBadRequest, map[string]string{\"error\": \"timeout_ms must be greater than 0\"})\n\t\t\treturn\n\t\t}\n\t\ttimeoutMs = *body.TimeoutMs\n\t\tchanged = true\n\t}\n\n\tif !changed {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"No valid fields to update\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.UpdateSchedule(id, name, cron, timezone, enabled, timeoutMs); err != nil {\n\t\tslog.Error(\"Failed to update schedule\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to update schedule\"})\n\t\treturn\n\t}\n\n\t// Recompute next run\n\tif enabled {\n\t\tnext := scheduler.ComputeNextRun(cron, time.Now().UTC())\n\t\th.DB.UpdateScheduleStatus(id, \"\", \"\", next)\n\t} else {\n\t\th.DB.UpdateScheduleStatus(id, \"\", \"\", nil)\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"schedule.updated\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(name),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// Delete removes a scheduled job.\nfunc (h *SchedulesHandler) Delete(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Schedule ID is required\"})\n\t\treturn\n\t}\n\n\texisting, err := h.DB.GetScheduleByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get schedule for delete\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch schedule\"})\n\t\treturn\n\t}\n\tif existing == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Scheduled job not found\"})\n\t\treturn\n\t}\n\n\tif err := h.DB.DeleteSchedule(id); 
err != nil {\n\t\tslog.Error(\"Failed to delete schedule\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to delete schedule\"})\n\t\treturn\n\t}\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"schedule.deleted\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(existing.Name),\n\t})\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\"success\": true})\n}\n\n// ListRuns returns the execution history for a scheduled job.\nfunc (h *SchedulesHandler) ListRuns(w http.ResponseWriter, r *http.Request) {\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Schedule ID is required\"})\n\t\treturn\n\t}\n\n\tschedule, err := h.DB.GetScheduleByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get schedule for runs\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch schedule\"})\n\t\treturn\n\t}\n\tif schedule == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Scheduled job not found\"})\n\t\treturn\n\t}\n\n\tlimit := 50\n\tif raw := r.URL.Query().Get(\"limit\"); raw != \"\" {\n\t\tif parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 500 {\n\t\t\tlimit = parsed\n\t\t}\n\t}\n\toffset := 0\n\tif raw := r.URL.Query().Get(\"offset\"); raw != \"\" {\n\t\tif parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 1000000 {\n\t\t\toffset = parsed\n\t\t}\n\t}\n\n\truns, err := h.DB.GetScheduleRuns(id, limit+1, offset)\n\tif err != nil {\n\t\tslog.Error(\"Failed to list schedule runs\", \"error\", err, \"schedule\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch runs\"})\n\t\treturn\n\t}\n\n\tif runs == nil {\n\t\truns = []database.ScheduleRun{}\n\t}\n\n\thasMore := len(runs) > limit\n\tif hasMore 
{\n\t\truns = runs[:limit]\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"runs\":        runs,\n\t\t\"has_more\":    hasMore,\n\t\t\"next_offset\": offset + len(runs),\n\t})\n}\n\n// ManualRun triggers a manual execution of a scheduled job.\nfunc (h *SchedulesHandler) ManualRun(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tid := chi.URLParam(r, \"id\")\n\tif id == \"\" {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Schedule ID is required\"})\n\t\treturn\n\t}\n\n\tschedule, err := h.DB.GetScheduleByID(id)\n\tif err != nil {\n\t\tslog.Error(\"Failed to get schedule for manual run\", \"error\", err, \"id\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch schedule\"})\n\t\treturn\n\t}\n\tif schedule == nil {\n\t\twriteJSON(w, http.StatusNotFound, map[string]string{\"error\": \"Scheduled job not found\"})\n\t\treturn\n\t}\n\n\t// Get the saved query\n\tsavedQuery, err := h.DB.GetSavedQueryByID(schedule.SavedQueryID)\n\tif err != nil {\n\t\tslog.Error(\"Failed to fetch saved query for manual run\", \"error\", err, \"saved_query_id\", schedule.SavedQueryID)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to fetch saved query\"})\n\t\treturn\n\t}\n\tif savedQuery == nil {\n\t\twriteJSON(w, http.StatusBadRequest, map[string]string{\"error\": \"Saved query not found\"})\n\t\treturn\n\t}\n\n\t// Determine connection\n\tconnectionID := session.ConnectionID\n\tif schedule.ConnectionID != nil && *schedule.ConnectionID != \"\" {\n\t\tconnectionID = *schedule.ConnectionID\n\t}\n\n\t// Decrypt credentials\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password for manual run\", 
\"error\", err, \"schedule\", id)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\ttimeout := time.Duration(schedule.TimeoutMs) * time.Millisecond\n\tif timeout <= 0 {\n\t\ttimeout = 60 * time.Second\n\t}\n\n\t// Create a run record\n\trunID, err := h.DB.CreateScheduleRun(id, \"running\")\n\tif err != nil {\n\t\tslog.Error(\"Failed to create schedule run\", \"error\", err, \"schedule\", id)\n\t}\n\n\t// Execute query\n\tstart := time.Now()\n\tresult, execErr := h.Gateway.ExecuteQuery(connectionID, savedQuery.Query, session.ClickhouseUser, password, timeout)\n\telapsed := time.Since(start).Milliseconds()\n\n\tstatus := \"success\"\n\tvar runErr string\n\trowCount := 0\n\tif execErr != nil {\n\t\tstatus = \"error\"\n\t\trunErr = execErr.Error()\n\t} else if result != nil {\n\t\trowCount = countRows(result.Data)\n\t}\n\n\t// Update run record\n\tif runID != \"\" {\n\t\th.DB.UpdateScheduleRun(runID, status, rowCount, int(elapsed), runErr)\n\t}\n\n\t// Update schedule status\n\tvar nextRun *time.Time\n\tif schedule.Enabled {\n\t\tnextRun = scheduler.ComputeNextRun(schedule.Cron, time.Now().UTC())\n\t}\n\th.DB.UpdateScheduleStatus(id, status, runErr, nextRun)\n\n\th.DB.CreateAuditLog(database.AuditLogParams{\n\t\tAction:   \"schedule.manual_run\",\n\t\tUsername: strPtr(session.ClickhouseUser),\n\t\tDetails:  strPtr(fmt.Sprintf(\"status=%s elapsed=%dms\", status, elapsed)),\n\t})\n\n\tif execErr != nil {\n\t\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\t\"success\": false,\n\t\t\t\"run_id\":  runID,\n\t\t\t\"status\":  \"error\",\n\t\t\t\"error\":   execErr.Error(),\n\t\t\t\"elapsed\": elapsed,\n\t\t})\n\t\treturn\n\t}\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"success\": true,\n\t\t\"run_id\":  runID,\n\t\t\"status\":  \"success\",\n\t\t\"elapsed\": elapsed,\n\t})\n}\n"
  },
  {
    "path": "internal/server/handlers/view_graph.go",
    "content": "package handlers\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"regexp\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/governance\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/google/uuid\"\n)\n\n// viewEntry holds a row from system.tables for a View or MaterializedView.\ntype viewEntry struct {\n\tDatabase         string `json:\"database\"`\n\tName             string `json:\"name\"`\n\tEngine           string `json:\"engine\"`\n\tCreateTableQuery string `json:\"create_table_query\"`\n}\n\n// Regex patterns for parsing SQL table references in view definitions.\n// These mirror the patterns in governance/lineage.go but are local to avoid\n// exporting internal regex details.\nconst vgTableRef = \"(\" +\n\t\"(?:`[^`]+`|[a-zA-Z_][a-zA-Z0-9_]*)\" +\n\t\"(?:\\\\.(?:`[^`]+`|[a-zA-Z_][a-zA-Z0-9_]*))?)\"\n\nvar (\n\tvgFromRe = regexp.MustCompile(`(?i)\\bFROM\\s+` + vgTableRef)\n\tvgJoinRe = regexp.MustCompile(`(?i)\\bJOIN\\s+` + vgTableRef)\n\tvgToRe   = regexp.MustCompile(`(?i)\\bTO\\s+` + vgTableRef)\n)\n\n// GetViewGraph queries ClickHouse for all materialized views and views,\n// parses their CREATE statements to build a structural dependency graph,\n// and returns it in the same LineageGraph format used by the lineage endpoints.\nfunc (h *GovernanceHandler) GetViewGraph(w http.ResponseWriter, r *http.Request) {\n\tsession := middleware.GetSession(r)\n\tif session == nil {\n\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\treturn\n\t}\n\n\tif !h.Gateway.IsTunnelOnline(session.ConnectionID) {\n\t\twriteJSON(w, http.StatusServiceUnavailable, map[string]string{\"error\": \"Tunnel is offline\"})\n\t\treturn\n\t}\n\n\tpassword, err := crypto.Decrypt(session.EncryptedPassword, h.Config.AppSecretKey)\n\tif err != nil {\n\t\tslog.Error(\"Failed to decrypt password\", 
\"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to decrypt credentials\"})\n\t\treturn\n\t}\n\n\tsql := `SELECT database, name, engine, create_table_query\n\t\tFROM system.tables\n\t\tWHERE engine IN ('MaterializedView', 'View')\n\t\t  AND database NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')\n\t\tFORMAT JSON`\n\n\tresult, err := h.Gateway.ExecuteQuery(\n\t\tsession.ConnectionID, sql,\n\t\tsession.ClickhouseUser, password,\n\t\t60*time.Second,\n\t)\n\tif err != nil {\n\t\tslog.Warn(\"Failed to query system.tables for views\", \"error\", err, \"connection\", session.ConnectionID)\n\t\twriteJSON(w, http.StatusBadGateway, map[string]string{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\t// Parse ClickHouse JSON response\n\traw, err := json.Marshal(result.Data)\n\tif err != nil {\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to marshal result\"})\n\t\treturn\n\t}\n\n\tvar rows []viewEntry\n\tif err := json.Unmarshal(raw, &rows); err != nil {\n\t\tslog.Error(\"Failed to parse view entries\", \"error\", err)\n\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Failed to parse view data\"})\n\t\treturn\n\t}\n\n\tgraph := buildViewGraph(rows)\n\n\twriteJSON(w, http.StatusOK, map[string]interface{}{\n\t\t\"graph\": graph,\n\t})\n}\n\n// buildViewGraph parses CREATE statements from view entries and constructs\n// a LineageGraph with nodes (tables, views, MVs) and edges (data flow).\nfunc buildViewGraph(rows []viewEntry) governance.LineageGraph {\n\tnodeMap := make(map[string]governance.LineageNode)\n\tvar edges []governance.LineageEdge\n\n\tfor _, row := range rows {\n\t\tviewKey := row.Database + \".\" + row.Name\n\t\tnodeType := \"materialized_view\"\n\t\tif strings.EqualFold(row.Engine, \"View\") {\n\t\t\tnodeType = \"view\"\n\t\t}\n\n\t\tnodeMap[viewKey] = governance.LineageNode{\n\t\t\tID:       viewKey,\n\t\t\tDatabase: 
row.Database,\n\t\t\tTable:    row.Name,\n\t\t\tType:     nodeType,\n\t\t}\n\n\t\tquery := normaliseWS(row.CreateTableQuery)\n\n\t\t// For materialized views: extract the TO target table\n\t\tvar toTarget *parsedRef\n\t\tif strings.EqualFold(row.Engine, \"MaterializedView\") {\n\t\t\ttoTarget = extractToTarget(query)\n\t\t}\n\n\t\t// Extract source tables from FROM/JOIN clauses in the AS SELECT part\n\t\tsources := extractViewSources(query)\n\n\t\t// Create edges: source → view\n\t\tfor _, src := range sources {\n\t\t\tif isSystemDB(src.db) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tsrcKey := src.key()\n\t\t\tif srcKey == viewKey {\n\t\t\t\tcontinue // skip self-reference\n\t\t\t}\n\n\t\t\t// Ensure source node exists\n\t\t\tif _, ok := nodeMap[srcKey]; !ok {\n\t\t\t\tnodeMap[srcKey] = governance.LineageNode{\n\t\t\t\t\tID:       srcKey,\n\t\t\t\t\tDatabase: src.db,\n\t\t\t\t\tTable:    src.table,\n\t\t\t\t\tType:     \"source\",\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tedges = append(edges, governance.LineageEdge{\n\t\t\t\tID:             uuid.New().String(),\n\t\t\t\tConnectionID:   \"\",\n\t\t\t\tSourceDatabase: src.db,\n\t\t\t\tSourceTable:    src.table,\n\t\t\t\tTargetDatabase: row.Database,\n\t\t\t\tTargetTable:    row.Name,\n\t\t\t\tEdgeType:       \"view_dependency\",\n\t\t\t})\n\t\t}\n\n\t\t// Create edge: MV → TO target\n\t\tif toTarget != nil && !isSystemDB(toTarget.db) {\n\t\t\ttgtKey := toTarget.key()\n\t\t\tif tgtKey != viewKey {\n\t\t\t\tif _, ok := nodeMap[tgtKey]; !ok {\n\t\t\t\t\tnodeMap[tgtKey] = governance.LineageNode{\n\t\t\t\t\t\tID:       tgtKey,\n\t\t\t\t\t\tDatabase: toTarget.db,\n\t\t\t\t\t\tTable:    toTarget.table,\n\t\t\t\t\t\tType:     \"target\",\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tedges = append(edges, governance.LineageEdge{\n\t\t\t\t\tID:             uuid.New().String(),\n\t\t\t\t\tConnectionID:   \"\",\n\t\t\t\t\tSourceDatabase: row.Database,\n\t\t\t\t\tSourceTable:    row.Name,\n\t\t\t\t\tTargetDatabase: toTarget.db,\n\t\t\t\t\tTargetTable:    
toTarget.table,\n\t\t\t\t\tEdgeType:       \"materialized_to\",\n\t\t\t\t})\n\t\t\t}\n\t\t}\n\t}\n\n\tnodes := make([]governance.LineageNode, 0, len(nodeMap))\n\tfor _, n := range nodeMap {\n\t\tnodes = append(nodes, n)\n\t}\n\n\tif edges == nil {\n\t\tedges = []governance.LineageEdge{}\n\t}\n\n\treturn governance.LineageGraph{\n\t\tNodes: nodes,\n\t\tEdges: edges,\n\t}\n}\n\n// parsedRef is a database.table reference extracted from SQL.\ntype parsedRef struct {\n\tdb    string\n\ttable string\n}\n\nfunc (r parsedRef) key() string {\n\tif r.db == \"\" {\n\t\treturn r.table\n\t}\n\treturn r.db + \".\" + r.table\n}\n\n// extractToTarget extracts the TO target table from a materialized view definition.\nfunc extractToTarget(query string) *parsedRef {\n\tm := vgToRe.FindStringSubmatch(query)\n\tif len(m) < 2 {\n\t\treturn nil\n\t}\n\tdb, tbl := splitRef(m[1])\n\treturn &parsedRef{db: db, table: tbl}\n}\n\n// extractViewSources finds all FROM and JOIN table references in a view definition.\nfunc extractViewSources(query string) []parsedRef {\n\tseen := map[string]bool{}\n\tvar results []parsedRef\n\n\tcollect := func(re *regexp.Regexp) {\n\t\tfor _, m := range re.FindAllStringSubmatch(query, -1) {\n\t\t\tif len(m) < 2 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tdb, tbl := splitRef(m[1])\n\t\t\tif isSystemDB(db) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tkey := db + \".\" + tbl\n\t\t\tif seen[key] {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tseen[key] = true\n\t\t\tresults = append(results, parsedRef{db: db, table: tbl})\n\t\t}\n\t}\n\n\tcollect(vgFromRe)\n\tcollect(vgJoinRe)\n\n\treturn results\n}\n\n// splitRef splits a possibly qualified table reference into (database, table).\nfunc splitRef(raw string) (string, string) {\n\traw = stripBT(raw)\n\tparts := strings.SplitN(raw, \".\", 2)\n\tif len(parts) == 2 {\n\t\treturn stripBT(parts[0]), stripBT(parts[1])\n\t}\n\treturn \"\", stripBT(parts[0])\n}\n\n// stripBT removes surrounding backticks.\nfunc stripBT(s string) string {\n\tif len(s) >= 2 
&& s[0] == '`' && s[len(s)-1] == '`' {\n\t\treturn s[1 : len(s)-1]\n\t}\n\treturn s\n}\n\n// normaliseWS collapses whitespace runs into single spaces.\nfunc normaliseWS(s string) string {\n\treturn strings.Join(strings.Fields(s), \" \")\n}\n\n// isSystemDB returns true for ClickHouse system databases.\nfunc isSystemDB(db string) bool {\n\tswitch strings.ToLower(db) {\n\tcase \"system\", \"information_schema\":\n\t\treturn true\n\t}\n\treturn false\n}\n"
  },
  {
    "path": "internal/server/middleware/context.go",
    "content": "package middleware\n\nimport (\n\t\"context\"\n\t\"net/http\"\n)\n\ntype contextKey string\n\nconst (\n\tsessionKey contextKey = \"session\"\n)\n\n// SessionInfo holds session data stored in the request context.\ntype SessionInfo struct {\n\tID                string\n\tConnectionID      string\n\tClickhouseUser    string\n\tEncryptedPassword string\n\tUserRole          string\n}\n\n// SetSession stores the session in the request context.\nfunc SetSession(ctx context.Context, session *SessionInfo) context.Context {\n\treturn context.WithValue(ctx, sessionKey, session)\n}\n\n// GetSession retrieves the session from the request context.\nfunc GetSession(r *http.Request) *SessionInfo {\n\ts, _ := r.Context().Value(sessionKey).(*SessionInfo)\n\treturn s\n}\n"
  },
  {
    "path": "internal/server/middleware/cors.go",
    "content": "package middleware\n\nimport (\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n)\n\n// CORSConfig holds CORS configuration.\ntype CORSConfig struct {\n\tAllowedOrigins []string\n\tAppURL         string\n\tDevMode        bool\n}\n\n// CORS returns a middleware that handles CORS headers.\nfunc CORS(cfg CORSConfig) func(http.Handler) http.Handler {\n\tvar appOrigin string\n\tif cfg.AppURL != \"\" {\n\t\tif u, err := url.Parse(cfg.AppURL); err == nil {\n\t\t\tappOrigin = u.Scheme + \"://\" + u.Host\n\t\t}\n\t}\n\n\tallowedSet := make(map[string]bool, len(cfg.AllowedOrigins))\n\tfor _, o := range cfg.AllowedOrigins {\n\t\tallowedSet[o] = true\n\t}\n\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\torigin := r.Header.Get(\"Origin\")\n\n\t\t\tif origin != \"\" {\n\t\t\t\tallowed := false\n\n\t\t\t\t// In dev mode, allow any localhost origin\n\t\t\t\tif cfg.DevMode {\n\t\t\t\t\tif strings.HasPrefix(origin, \"http://localhost:\") ||\n\t\t\t\t\t\tstrings.HasPrefix(origin, \"http://127.0.0.1:\") {\n\t\t\t\t\t\tallowed = true\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Check explicit allowed origins\n\t\t\t\tif !allowed && allowedSet[origin] {\n\t\t\t\t\tallowed = true\n\t\t\t\t}\n\n\t\t\t\t// Check against APP_URL origin\n\t\t\t\tif !allowed && appOrigin != \"\" {\n\t\t\t\t\tcleaned := strings.TrimSuffix(origin, \"/\")\n\t\t\t\t\tif cleaned == appOrigin || cleaned == strings.TrimSuffix(appOrigin, \"/\") {\n\t\t\t\t\t\tallowed = true\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tif allowed {\n\t\t\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\t\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\t\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", \"GET, POST, PUT, DELETE, OPTIONS\")\n\t\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", \"Content-Type, Authorization, Cookie\")\n\t\t\t\t\tw.Header().Set(\"Vary\", 
\"Origin\")\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Handle preflight\n\t\t\tif r.Method == http.MethodOptions {\n\t\t\t\tw.WriteHeader(http.StatusNoContent)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "internal/server/middleware/license.go",
    "content": "package middleware\n\nimport (\n\t\"net/http\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n)\n\n// RequirePro returns a middleware that blocks access unless the installation\n// has an active Pro license. Returns 402 Payment Required otherwise.\nfunc RequirePro(cfg *config.Config) func(http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tif !cfg.IsPro() {\n\t\t\t\twriteJSON(w, http.StatusPaymentRequired, map[string]string{\n\t\t\t\t\t\"error\": \"Pro license required\",\n\t\t\t\t})\n\t\t\t\treturn\n\t\t\t}\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "internal/server/middleware/logging.go",
    "content": "package middleware\n\nimport (\n\t\"bufio\"\n\t\"log/slog\"\n\t\"net\"\n\t\"net/http\"\n\t\"time\"\n)\n\n// responseWriter wraps http.ResponseWriter to capture the status code.\ntype responseWriter struct {\n\thttp.ResponseWriter\n\tstatus int\n}\n\nfunc (rw *responseWriter) WriteHeader(code int) {\n\trw.status = code\n\trw.ResponseWriter.WriteHeader(code)\n}\n\n// Hijack proxies websocket/upgrade hijacking to the underlying writer.\nfunc (rw *responseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {\n\thj, ok := rw.ResponseWriter.(http.Hijacker)\n\tif !ok {\n\t\treturn nil, nil, http.ErrNotSupported\n\t}\n\treturn hj.Hijack()\n}\n\n// Flush proxies streaming flushes to the underlying writer when available.\nfunc (rw *responseWriter) Flush() {\n\tif f, ok := rw.ResponseWriter.(http.Flusher); ok {\n\t\tf.Flush()\n\t}\n}\n\n// Push proxies HTTP/2 server push when available.\nfunc (rw *responseWriter) Push(target string, opts *http.PushOptions) error {\n\tif p, ok := rw.ResponseWriter.(http.Pusher); ok {\n\t\treturn p.Push(target, opts)\n\t}\n\treturn http.ErrNotSupported\n}\n\n// Logger returns a middleware that logs each request.\nfunc Logger(next http.Handler) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tstart := time.Now()\n\t\trw := &responseWriter{ResponseWriter: w, status: 200}\n\t\tnext.ServeHTTP(rw, r)\n\n\t\tslog.Debug(\"request\",\n\t\t\t\"method\", r.Method,\n\t\t\t\"path\", r.URL.Path,\n\t\t\t\"status\", rw.status,\n\t\t\t\"duration\", time.Since(start).String(),\n\t\t\t\"ip\", r.RemoteAddr,\n\t\t)\n\t})\n}\n"
  },
  {
    "path": "internal/server/middleware/ratelimit.go",
    "content": "package middleware\n\nimport (\n\t\"fmt\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\n// RateLimiter provides rate limiting backed by SQLite.\ntype RateLimiter struct {\n\tdb *database.DB\n}\n\n// NewRateLimiter creates a new rate limiter.\nfunc NewRateLimiter(db *database.DB) *RateLimiter {\n\treturn &RateLimiter{db: db}\n}\n\n// RateLimitResult holds the result of a rate limit check.\ntype RateLimitResult struct {\n\tAllowed     bool\n\tRetryAfter  time.Duration\n\tAttempts    int\n\tMaxAttempts int\n}\n\nvar lockoutSchedule = []time.Duration{\n\t3 * time.Minute,\n\t5 * time.Minute,\n\t10 * time.Minute,\n}\n\n// CheckAuthRateLimit checks if a login attempt is allowed.\n// Returns whether the attempt is allowed, and if not, how long to wait.\nfunc (rl *RateLimiter) CheckAuthRateLimit(identifier, limitType string, maxAttempts int, windowDuration time.Duration) RateLimitResult {\n\tentry, err := rl.db.GetRateLimit(identifier)\n\tif err != nil {\n\t\t// On error, allow the request\n\t\treturn RateLimitResult{Allowed: true, MaxAttempts: maxAttempts}\n\t}\n\n\tnow := time.Now()\n\n\tif entry != nil {\n\t\tbaseType, lockLevel := parseLimitTypeAndLockLevel(entry.Type, limitType)\n\t\tentryType := formatLimitTypeWithLockLevel(baseType, lockLevel)\n\n\t\t// Check if locked out\n\t\tif entry.LockedUntil != nil {\n\t\t\tlockedUntil, err := time.Parse(time.RFC3339, *entry.LockedUntil)\n\t\t\tif err == nil && now.Before(lockedUntil) {\n\t\t\t\t// Compatibility: normalize legacy long locks to the new capped schedule.\n\t\t\t\tactiveLevel := lockLevel\n\t\t\t\tif activeLevel <= 0 {\n\t\t\t\t\tactiveLevel = 1\n\t\t\t\t}\n\t\t\t\tmaxDuration := lockoutDurationForLevel(activeLevel)\n\t\t\t\tif remaining := time.Until(lockedUntil); remaining > maxDuration {\n\t\t\t\t\tnormalizedUntil := now.Add(maxDuration)\n\t\t\t\t\tlockedUntil = 
normalizedUntil\n\t\t\t\t\trl.db.UpsertRateLimit(\n\t\t\t\t\t\tidentifier,\n\t\t\t\t\t\tformatLimitTypeWithLockLevel(baseType, activeLevel),\n\t\t\t\t\t\tentry.Attempts,\n\t\t\t\t\t\tnow,\n\t\t\t\t\t\t&normalizedUntil,\n\t\t\t\t\t)\n\t\t\t\t}\n\t\t\t\treturn RateLimitResult{\n\t\t\t\t\tAllowed:     false,\n\t\t\t\t\tRetryAfter:  time.Until(lockedUntil),\n\t\t\t\t\tAttempts:    entry.Attempts,\n\t\t\t\t\tMaxAttempts: maxAttempts,\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Lock expired: keep escalation level, reset attempt window.\n\t\t\trl.db.UpsertRateLimit(identifier, entryType, 0, now, nil)\n\t\t\treturn RateLimitResult{Allowed: true, MaxAttempts: maxAttempts}\n\t\t}\n\n\t\t// Check if window expired\n\t\tfirstAttempt, err := time.Parse(time.RFC3339, entry.FirstAttemptAt)\n\t\tif err == nil && now.Sub(firstAttempt) > windowDuration {\n\t\t\t// Window expired, reset\n\t\t\t// Escalation is reset once the attempts window is clean.\n\t\t\trl.db.UpsertRateLimit(identifier, limitType, 0, now, nil)\n\t\t\treturn RateLimitResult{Allowed: true, MaxAttempts: maxAttempts}\n\t\t}\n\n\t\t// Check attempts\n\t\tif entry.Attempts >= maxAttempts {\n\t\t\t// Lock out\n\t\t\tnextLevel := nextLockoutLevel(lockLevel)\n\t\t\tlockoutDuration := lockoutDurationForLevel(nextLevel)\n\t\t\tlockedUntil := now.Add(lockoutDuration)\n\t\t\trl.db.UpsertRateLimit(\n\t\t\t\tidentifier,\n\t\t\t\tformatLimitTypeWithLockLevel(baseType, nextLevel),\n\t\t\t\tentry.Attempts,\n\t\t\t\tfirstAttempt,\n\t\t\t\t&lockedUntil,\n\t\t\t)\n\t\t\treturn RateLimitResult{\n\t\t\t\tAllowed:     false,\n\t\t\t\tRetryAfter:  lockoutDuration,\n\t\t\t\tAttempts:    entry.Attempts,\n\t\t\t\tMaxAttempts: maxAttempts,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn RateLimitResult{Allowed: true, MaxAttempts: maxAttempts}\n}\n\n// RecordAttempt records a failed login attempt.\nfunc (rl *RateLimiter) RecordAttempt(identifier, limitType string) {\n\tentry, _ := rl.db.GetRateLimit(identifier)\n\n\tnow := time.Now()\n\tif entry == nil 
{\n\t\trl.db.UpsertRateLimit(identifier, limitType, 1, now, nil)\n\t\treturn\n\t}\n\n\tbaseType, lockLevel := parseLimitTypeAndLockLevel(entry.Type, limitType)\n\tfirstAttempt, err := time.Parse(time.RFC3339, entry.FirstAttemptAt)\n\tif err != nil {\n\t\tfirstAttempt = now\n\t}\n\trl.db.UpsertRateLimit(\n\t\tidentifier,\n\t\tformatLimitTypeWithLockLevel(baseType, lockLevel),\n\t\tentry.Attempts+1,\n\t\tfirstAttempt,\n\t\tnil,\n\t)\n}\n\n// ResetLimit resets the rate limit for an identifier.\nfunc (rl *RateLimiter) ResetLimit(identifier string) {\n\trl.db.DeleteRateLimit(identifier)\n}\n\nfunc parseLimitTypeAndLockLevel(storedType, fallback string) (string, int) {\n\ttrimmed := strings.TrimSpace(storedType)\n\tif trimmed == \"\" {\n\t\treturn fallback, 0\n\t}\n\n\tparts := strings.SplitN(trimmed, \":\", 2)\n\tbase := strings.TrimSpace(parts[0])\n\tif base == \"\" {\n\t\tbase = fallback\n\t}\n\tif len(parts) == 1 {\n\t\treturn base, 0\n\t}\n\n\tlevel, err := strconv.Atoi(strings.TrimSpace(parts[1]))\n\tif err != nil || level < 0 {\n\t\treturn base, 0\n\t}\n\treturn base, level\n}\n\nfunc formatLimitTypeWithLockLevel(base string, level int) string {\n\tif level <= 0 {\n\t\treturn base\n\t}\n\treturn fmt.Sprintf(\"%s:%d\", base, level)\n}\n\nfunc nextLockoutLevel(current int) int {\n\tnext := current + 1\n\tif next < 1 {\n\t\tnext = 1\n\t}\n\tif next > len(lockoutSchedule) {\n\t\tnext = len(lockoutSchedule)\n\t}\n\treturn next\n}\n\nfunc lockoutDurationForLevel(level int) time.Duration {\n\tif level <= 1 {\n\t\treturn lockoutSchedule[0]\n\t}\n\tif level > len(lockoutSchedule) {\n\t\treturn lockoutSchedule[len(lockoutSchedule)-1]\n\t}\n\treturn lockoutSchedule[level-1]\n}\n"
  },
  {
    "path": "internal/server/middleware/ratelimit_test.go",
    "content": "package middleware\n\nimport (\n\t\"path/filepath\"\n\t\"testing\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\nfunc TestProgressiveLockoutSchedule(t *testing.T) {\n\tdbPath := filepath.Join(t.TempDir(), \"rate-limit.db\")\n\tdb, err := database.Open(dbPath)\n\tif err != nil {\n\t\tt.Fatalf(\"open db: %v\", err)\n\t}\n\tdefer db.Close()\n\n\trl := NewRateLimiter(db)\n\tidentifier := \"user:test\"\n\tmaxAttempts := 3\n\twindow := 15 * time.Minute\n\n\texpectLocked := func(expectedMin time.Duration, expectedMax time.Duration, expectedType string) {\n\t\tt.Helper()\n\t\tres := rl.CheckAuthRateLimit(identifier, \"user\", maxAttempts, window)\n\t\tif res.Allowed {\n\t\t\tt.Fatalf(\"expected blocked result for %s\", expectedType)\n\t\t}\n\t\tif res.RetryAfter < expectedMin || res.RetryAfter > expectedMax {\n\t\t\tt.Fatalf(\"retryAfter out of range: got=%s want=[%s,%s]\", res.RetryAfter, expectedMin, expectedMax)\n\t\t}\n\t\tentry, err := db.GetRateLimit(identifier)\n\t\tif err != nil {\n\t\t\tt.Fatalf(\"get rate limit: %v\", err)\n\t\t}\n\t\tif entry == nil {\n\t\t\tt.Fatalf(\"expected persisted rate limit entry\")\n\t\t}\n\t\tif entry.Type != expectedType {\n\t\t\tt.Fatalf(\"unexpected entry type: got=%q want=%q\", entry.Type, expectedType)\n\t\t}\n\t\tif entry.LockedUntil == nil {\n\t\t\tt.Fatalf(\"expected locked_until to be set\")\n\t\t}\n\t}\n\n\texpireLock := func(expectedType string) {\n\t\tt.Helper()\n\t\tnow := time.Now()\n\t\texpired := now.Add(-1 * time.Second)\n\t\tif err := db.UpsertRateLimit(identifier, expectedType, maxAttempts, now.Add(-2*time.Minute), &expired); err != nil {\n\t\t\tt.Fatalf(\"upsert expired lock: %v\", err)\n\t\t}\n\t\tres := rl.CheckAuthRateLimit(identifier, \"user\", maxAttempts, window)\n\t\tif !res.Allowed {\n\t\t\tt.Fatalf(\"expected allowed result after lock expiry, got blocked retryAfter=%s\", res.RetryAfter)\n\t\t}\n\t\tentry, err := db.GetRateLimit(identifier)\n\t\tif err != nil 
{\n\t\t\tt.Fatalf(\"get rate limit after expiry: %v\", err)\n\t\t}\n\t\tif entry == nil {\n\t\t\tt.Fatalf(\"expected rate limit entry after lock expiry\")\n\t\t}\n\t\tif entry.Type != expectedType {\n\t\t\tt.Fatalf(\"unexpected type after expiry: got=%q want=%q\", entry.Type, expectedType)\n\t\t}\n\t\tif entry.Attempts != 0 {\n\t\t\tt.Fatalf(\"attempts should reset after expiry: got=%d\", entry.Attempts)\n\t\t}\n\t\tif entry.LockedUntil != nil {\n\t\t\tt.Fatalf(\"expected lock to be cleared after expiry\")\n\t\t}\n\t}\n\n\tif err := db.UpsertRateLimit(identifier, \"user\", maxAttempts, time.Now(), nil); err != nil {\n\t\tt.Fatalf(\"seed first lock: %v\", err)\n\t}\n\texpectLocked(3*time.Minute-5*time.Second, 3*time.Minute+5*time.Second, \"user:1\")\n\n\texpireLock(\"user:1\")\n\tif err := db.UpsertRateLimit(identifier, \"user:1\", maxAttempts, time.Now(), nil); err != nil {\n\t\tt.Fatalf(\"seed second lock: %v\", err)\n\t}\n\texpectLocked(5*time.Minute-5*time.Second, 5*time.Minute+5*time.Second, \"user:2\")\n\n\texpireLock(\"user:2\")\n\tif err := db.UpsertRateLimit(identifier, \"user:2\", maxAttempts, time.Now(), nil); err != nil {\n\t\tt.Fatalf(\"seed third lock: %v\", err)\n\t}\n\texpectLocked(10*time.Minute-5*time.Second, 10*time.Minute+5*time.Second, \"user:3\")\n\n\texpireLock(\"user:3\")\n\tif err := db.UpsertRateLimit(identifier, \"user:3\", maxAttempts, time.Now(), nil); err != nil {\n\t\tt.Fatalf(\"seed capped lock: %v\", err)\n\t}\n\texpectLocked(10*time.Minute-5*time.Second, 10*time.Minute+5*time.Second, \"user:3\")\n}\n\nfunc TestLegacyLongLockIsCappedToCurrentSchedule(t *testing.T) {\n\tdbPath := filepath.Join(t.TempDir(), \"rate-limit-legacy.db\")\n\tdb, err := database.Open(dbPath)\n\tif err != nil {\n\t\tt.Fatalf(\"open db: %v\", err)\n\t}\n\tdefer db.Close()\n\n\trl := NewRateLimiter(db)\n\tidentifier := \"user:legacy\"\n\tnow := time.Now()\n\tlegacyUntil := now.Add(2 * time.Hour)\n\tif err := db.UpsertRateLimit(identifier, \"user:3\", 3, 
now.Add(-1*time.Minute), &legacyUntil); err != nil {\n\t\tt.Fatalf(\"seed legacy lock: %v\", err)\n\t}\n\n\tres := rl.CheckAuthRateLimit(identifier, \"user\", 3, 15*time.Minute)\n\tif res.Allowed {\n\t\tt.Fatalf(\"expected request to remain blocked during capped lock window\")\n\t}\n\tif res.RetryAfter > 10*time.Minute+5*time.Second {\n\t\tt.Fatalf(\"legacy lock should be capped to 10m, got retryAfter=%s\", res.RetryAfter)\n\t}\n\n\tentry, err := db.GetRateLimit(identifier)\n\tif err != nil {\n\t\tt.Fatalf(\"get rate limit: %v\", err)\n\t}\n\tif entry == nil || entry.LockedUntil == nil {\n\t\tt.Fatalf(\"expected normalized lock to be persisted\")\n\t}\n\n\tnormalizedUntil, err := time.Parse(time.RFC3339, *entry.LockedUntil)\n\tif err != nil {\n\t\tt.Fatalf(\"parse locked_until: %v\", err)\n\t}\n\tif normalizedUntil.After(time.Now().Add(10*time.Minute + 5*time.Second)) {\n\t\tt.Fatalf(\"persisted lock should be capped near now+10m, got %s\", normalizedUntil)\n\t}\n}\n"
  },
  {
    "path": "internal/server/middleware/security.go",
    "content": "package middleware\n\nimport \"net/http\"\n\n// SecurityHeaders adds security headers to responses.\nfunc SecurityHeaders(isProduction bool) func(http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tw.Header().Set(\"X-Content-Type-Options\", \"nosniff\")\n\t\t\tw.Header().Set(\"X-Frame-Options\", \"DENY\")\n\t\t\tw.Header().Set(\"Referrer-Policy\", \"strict-origin-when-cross-origin\")\n\t\t\tw.Header().Set(\"Permissions-Policy\", \"geolocation=(), microphone=(), camera=()\")\n\n\t\t\t// CSP is applied in all modes to mitigate XSS attacks.\n\t\t\tw.Header().Set(\"Content-Security-Policy\",\n\t\t\t\t\"default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data: blob:; connect-src 'self' https://api.openai.com; font-src 'self' data:; frame-ancestors 'none'; base-uri 'self'; form-action 'self';\")\n\n\t\t\tif isProduction {\n\t\t\t\tw.Header().Set(\"Strict-Transport-Security\", \"max-age=31536000; includeSubDomains; preload\")\n\t\t\t}\n\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "internal/server/middleware/session.go",
    "content": "package middleware\n\nimport (\n\t\"encoding/json\"\n\t\"log/slog\"\n\t\"net/http\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n)\n\n// writeJSON writes a JSON response.\nfunc writeJSON(w http.ResponseWriter, status int, v interface{}) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(status)\n\tjson.NewEncoder(w).Encode(v)\n}\n\n// Session returns a middleware that validates the chui_session cookie.\nfunc Session(db *database.DB, _ *tunnel.Gateway) func(http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tcookie, err := r.Cookie(\"chui_session\")\n\t\t\tif err != nil || cookie.Value == \"\" {\n\t\t\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Not authenticated\"})\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tsession, err := db.GetSession(cookie.Value)\n\t\t\tif err != nil {\n\t\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Session lookup failed\"})\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif session == nil {\n\t\t\t\twriteJSON(w, http.StatusUnauthorized, map[string]string{\"error\": \"Session expired or invalid\"})\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\trole := \"viewer\"\n\t\t\toverrideRole, err := db.GetUserRole(session.ClickhouseUser)\n\t\t\tif err != nil {\n\t\t\t\tslog.Warn(\"Failed to resolve explicit user role\", \"user\", session.ClickhouseUser, \"error\", err)\n\t\t\t} else if overrideRole != \"\" {\n\t\t\t\trole = overrideRole\n\t\t\t} else if session.UserRole != nil && *session.UserRole != \"\" {\n\t\t\t\trole = *session.UserRole\n\t\t\t}\n\n\t\t\tinfo := &SessionInfo{\n\t\t\t\tID:                session.ID,\n\t\t\t\tConnectionID:      session.ConnectionID,\n\t\t\t\tClickhouseUser:    session.ClickhouseUser,\n\t\t\t\tEncryptedPassword: session.EncryptedPassword,\n\t\t\t\tUserRole:          
role,\n\t\t\t}\n\n\t\t\tctx := SetSession(r.Context(), info)\n\t\t\tnext.ServeHTTP(w, r.WithContext(ctx))\n\t\t})\n\t}\n}\n\n// RequireAdmin returns a middleware that requires admin role.\nfunc RequireAdmin(db *database.DB) func(http.Handler) http.Handler {\n\treturn func(next http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tsession := GetSession(r)\n\t\t\tif session == nil {\n\t\t\t\twriteJSON(w, http.StatusForbidden, map[string]string{\"error\": \"Admin access required\"})\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tisAdmin, err := db.IsUserRole(session.ClickhouseUser, \"admin\")\n\t\t\tif err != nil {\n\t\t\t\twriteJSON(w, http.StatusInternalServerError, map[string]string{\"error\": \"Role check failed\"})\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif !isAdmin {\n\t\t\t\twriteJSON(w, http.StatusForbidden, map[string]string{\"error\": \"Admin access required\"})\n\t\t\t\treturn\n\t\t\t}\n\t\t\tnext.ServeHTTP(w, r)\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "internal/server/server.go",
    "content": "package server\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"io/fs\"\n\t\"log/slog\"\n\t\"net/http\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/alerts\"\n\t\"github.com/caioricciuti/ch-ui/internal/config\"\n\t\"github.com/caioricciuti/ch-ui/internal/crypto\"\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n\t\"github.com/caioricciuti/ch-ui/internal/governance\"\n\t\"github.com/caioricciuti/ch-ui/internal/langfuse\"\n\t\"github.com/caioricciuti/ch-ui/internal/models\"\n\t\"github.com/caioricciuti/ch-ui/internal/pipelines\"\n\t\"github.com/caioricciuti/ch-ui/internal/scheduler\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/handlers\"\n\t\"github.com/caioricciuti/ch-ui/internal/server/middleware\"\n\t\"github.com/caioricciuti/ch-ui/internal/tunnel\"\n\t\"github.com/go-chi/chi/v5\"\n)\n\n// Server is the main HTTP server.\ntype Server struct {\n\tcfg            *config.Config\n\tdb             *database.DB\n\tgateway        *tunnel.Gateway\n\tscheduler      *scheduler.Runner\n\tpipelineRunner *pipelines.Runner\n\tmodelRunner    *models.Runner\n\tmodelScheduler *models.Scheduler\n\tgovSyncer      *governance.Syncer\n\tguardrails     *governance.GuardrailService\n\talerts         *alerts.Dispatcher\n\tlangfuse       *langfuse.Client\n\trouter         chi.Router\n\thttp           *http.Server\n\tfrontendFS     fs.FS\n}\n\n// New creates a new Server with all routes configured.\nfunc New(cfg *config.Config, db *database.DB, frontendFS fs.FS) *Server {\n\tr := chi.NewRouter()\n\tgw := tunnel.NewGateway(db)\n\n\tsched := scheduler.NewRunner(db, gw, cfg.AppSecretKey)\n\tpipeRunner := pipelines.NewRunner(db, gw, cfg)\n\tmodelRunner := models.NewRunner(db, gw, cfg.AppSecretKey)\n\tmodelScheduler := models.NewScheduler(db, modelRunner)\n\n\tgovStore := governance.NewStore(db)\n\tgovSyncer := governance.NewSyncer(govStore, db, gw, cfg.AppSecretKey)\n\talertDispatcher := alerts.NewDispatcher(db, cfg)\n\tlfClient := 
langfuse.New()\n\n\t// Load Langfuse config from database settings (if configured via admin UI)\n\tif lfCfg, err := loadLangfuseConfigFromDB(db, cfg.AppSecretKey); err != nil {\n\t\tslog.Warn(\"Failed to load Langfuse config from database\", \"error\", err)\n\t} else if lfCfg.Enabled() {\n\t\tlfClient.Reconfigure(lfCfg)\n\t}\n\n\ts := &Server{\n\t\tcfg:            cfg,\n\t\tdb:             db,\n\t\tgateway:        gw,\n\t\tscheduler:      sched,\n\t\tpipelineRunner: pipeRunner,\n\t\tmodelRunner:    modelRunner,\n\t\tmodelScheduler: modelScheduler,\n\t\tgovSyncer:      govSyncer,\n\t\tguardrails:     governance.NewGuardrailService(govStore, db),\n\t\talerts:         alertDispatcher,\n\t\tlangfuse:       lfClient,\n\t\trouter:         r,\n\t\tfrontendFS:     frontendFS,\n\t}\n\n\ts.setupRoutes()\n\n\ts.http = &http.Server{\n\t\tAddr:         fmt.Sprintf(\":%d\", cfg.Port),\n\t\tHandler:      r,\n\t\tReadTimeout:  30 * time.Second,\n\t\tWriteTimeout: 5 * time.Minute, // Long for streaming SSE/queries\n\t\tIdleTimeout:  120 * time.Second,\n\t}\n\n\treturn s\n}\n\nfunc (s *Server) setupRoutes() {\n\tr := s.router\n\tcfg := s.cfg\n\tdb := s.db\n\tgw := s.gateway\n\n\t// ── Global middleware ────────────────────────────────────────────────\n\tr.Use(middleware.Logger)\n\tr.Use(middleware.SecurityHeaders(!cfg.DevMode))\n\tr.Use(middleware.CORS(middleware.CORSConfig{\n\t\tDevMode:        cfg.DevMode,\n\t\tAllowedOrigins: cfg.AllowedOrigins,\n\t\tAppURL:         cfg.AppURL,\n\t}))\n\n\t// ── Health check (no auth) ──────────────────────────────────────────\n\thealthHandler := &handlers.HealthHandler{}\n\tr.Get(\"/health\", healthHandler.Health)\n\n\t// ── WebSocket tunnel endpoint (agent authenticates via token) ────────\n\tr.HandleFunc(\"/connect\", gw.HandleWebSocket)\n\n\t// ── Rate limiter (shared across handlers) ───────────────────────────\n\trateLimiter := middleware.NewRateLimiter(db)\n\n\t// ── Webhook endpoint for pipelines (no session — uses token auth) 
──\n\tr.Post(\"/api/pipelines/webhook/{id}\", pipelines.HandleWebhook)\n\n\t// ── API routes ─────────────────────────────────────────────────────\n\tr.Route(\"/api\", func(api chi.Router) {\n\t\t// Auth routes (no session required, login creates the session)\n\t\tauthHandler := &handlers.AuthHandler{\n\t\t\tDB:          db,\n\t\t\tGateway:     gw,\n\t\t\tRateLimiter: rateLimiter,\n\t\t\tConfig:      cfg,\n\t\t}\n\t\tapi.Route(\"/auth\", authHandler.Routes)\n\n\t\t// License status (no session required)\n\t\tlicenseHandler := &handlers.LicenseHandler{DB: db, Config: cfg}\n\t\tapi.Get(\"/license\", licenseHandler.GetLicense)\n\n\t\t// All routes below require a valid session\n\t\tapi.Group(func(protected chi.Router) {\n\t\t\tprotected.Use(middleware.Session(db, gw))\n\n\t\t\t// License activation (requires session)\n\t\t\tprotected.Post(\"/license/activate\", licenseHandler.ActivateLicense)\n\t\t\tprotected.Post(\"/license/deactivate\", licenseHandler.DeactivateLicense)\n\n\t\t\t// Query execution (community)\n\t\t\tqueryHandler := &handlers.QueryHandler{DB: db, Gateway: gw, Config: cfg, Guardrails: s.guardrails}\n\t\t\tprotected.Route(\"/query\", queryHandler.Routes)\n\n\t\t\t// Connections management (community)\n\t\t\tconnectionsHandler := &handlers.ConnectionsHandler{DB: db, Gateway: gw, Config: cfg}\n\t\t\tprotected.Route(\"/connections\", func(cr chi.Router) {\n\t\t\t\tcr.Get(\"/\", connectionsHandler.List)\n\t\t\t\tcr.Post(\"/\", connectionsHandler.Create)\n\t\t\t\tcr.Get(\"/{id}\", connectionsHandler.Get)\n\t\t\t\tcr.Delete(\"/{id}\", connectionsHandler.Delete)\n\t\t\t\tcr.Post(\"/{id}/test\", connectionsHandler.TestConnection)\n\t\t\t\tcr.Get(\"/{id}/token\", connectionsHandler.GetToken)\n\t\t\t\tcr.Post(\"/{id}/regenerate-token\", connectionsHandler.RegenerateToken)\n\t\t\t})\n\n\t\t\t// Saved queries (community)\n\t\t\tsavedQueriesHandler := &handlers.SavedQueriesHandler{DB: db}\n\t\t\tprotected.Route(\"/saved-queries\", 
savedQueriesHandler.Routes)\n\n\t\t\t// ── Community features ─────────────────────────────────────\n\t\t\t// Dashboards\n\t\t\tdashboardsHandler := &handlers.DashboardsHandler{DB: db, Gateway: gw, Config: cfg}\n\t\t\tprotected.Mount(\"/dashboards\", dashboardsHandler.Routes())\n\n\t\t\t// Pipelines\n\t\t\tpipelinesHandler := &handlers.PipelinesHandler{DB: db, Gateway: gw, Config: cfg, Runner: s.pipelineRunner}\n\t\t\tprotected.Mount(\"/pipelines\", pipelinesHandler.Routes())\n\n\t\t\t// Models (dbt-like SQL transformations)\n\t\t\tmodelsHandler := &handlers.ModelsHandler{DB: db, Gateway: gw, Config: cfg, Runner: s.modelRunner}\n\t\t\tprotected.Mount(\"/models\", modelsHandler.Routes())\n\n\t\t\t// Brain AI assistant\n\t\t\tbrainHandler := &handlers.BrainHandler{DB: db, Gateway: gw, Config: cfg, Langfuse: s.langfuse}\n\t\t\tprotected.Route(\"/brain\", brainHandler.Routes)\n\n\t\t\t// Admin routes (require admin role)\n\t\t\tadminHandler := &handlers.AdminHandler{\n\t\t\t\tDB:        db,\n\t\t\t\tGateway:   gw,\n\t\t\t\tConfig:    cfg,\n\t\t\t\tLangfuse:  s.langfuse,\n\t\t\t\tGovSyncer: s.govSyncer,\n\t\t\t}\n\t\t\tprotected.Route(\"/admin\", func(ar chi.Router) {\n\t\t\t\tadminHandler.Routes(ar)\n\t\t\t})\n\n\t\t\t// ── Pro-only features ──────────────────────────────────────\n\t\t\tprotected.Group(func(pro chi.Router) {\n\t\t\t\tpro.Use(middleware.RequirePro(cfg))\n\n\t\t\t\t// Scheduled jobs\n\t\t\t\tschedulesHandler := &handlers.SchedulesHandler{DB: db, Gateway: gw, Config: cfg}\n\t\t\t\tpro.Route(\"/schedules\", schedulesHandler.Routes)\n\n\t\t\t\t// Governance\n\t\t\t\tgovHandler := &handlers.GovernanceHandler{\n\t\t\t\t\tDB: db, Gateway: gw, Config: cfg,\n\t\t\t\t\tStore:  s.govSyncer.GetStore(),\n\t\t\t\t\tSyncer: s.govSyncer,\n\t\t\t\t}\n\t\t\t\tpro.Mount(\"/governance\", govHandler.Routes())\n\t\t\t})\n\t\t})\n\t})\n\n\t// ── SPA fallback (serve embedded frontend) ──────────────────────────\n\tif s.frontendFS != nil {\n\t\t// Check whether the frontend was 
actually built and embedded.\n\t\tif _, err := s.frontendFS.Open(\"index.html\"); err != nil {\n\t\t\tslog.Warn(\"Frontend assets not embedded; build the frontend first or use a release binary\")\n\t\t\tr.Get(\"/*\", func(w http.ResponseWriter, r *http.Request) {\n\t\t\t\tw.Header().Set(\"Content-Type\", \"text/plain; charset=utf-8\")\n\t\t\t\tw.WriteHeader(http.StatusNotFound)\n\t\t\t\tfmt.Fprintln(w, \"Frontend assets not available. Build the frontend first or use a release binary.\")\n\t\t\t})\n\t\t} else {\n\t\t\tfileServer := http.FileServer(http.FS(s.frontendFS))\n\t\t\tr.Get(\"/*\", func(w http.ResponseWriter, r *http.Request) {\n\t\t\t\t// Try to serve the file directly\n\t\t\t\tpath := r.URL.Path[1:] // strip leading /\n\t\t\t\tf, err := s.frontendFS.Open(path)\n\t\t\t\tif err != nil {\n\t\t\t\t\t// File not found — serve index.html for SPA routing\n\t\t\t\t\tw.Header().Set(\"Cache-Control\", \"no-cache\")\n\t\t\t\t\tr.URL.Path = \"/\"\n\t\t\t\t} else {\n\t\t\t\t\tf.Close()\n\t\t\t\t\tif strings.HasPrefix(path, \"assets/\") {\n\t\t\t\t\t\tw.Header().Set(\"Cache-Control\", \"public, max-age=31536000, immutable\")\n\t\t\t\t\t} else {\n\t\t\t\t\t\tw.Header().Set(\"Cache-Control\", \"no-cache\")\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tfileServer.ServeHTTP(w, r)\n\t\t\t})\n\t\t}\n\t}\n\n\tslog.Info(\"Routes configured\")\n}\n\n// Start starts the HTTP server.\nfunc (s *Server) Start() error {\n\ts.scheduler.Start()\n\ts.pipelineRunner.Start()\n\ts.modelScheduler.Start()\n\tswitch {\n\tcase !s.cfg.IsPro():\n\t\tslog.Info(\"Governance background sync disabled (requires Pro license)\")\n\tcase !s.db.GovernanceSyncEnabled():\n\t\tslog.Info(\"Governance background sync disabled (opt-in required; enable in Governance → Settings)\")\n\tdefault:\n\t\ts.govSyncer.StartBackground()\n\t}\n\ts.alerts.Start()\n\ts.langfuse.Start()\n\n\tslog.Info(\"Server listening\", \"addr\", s.http.Addr)\n\treturn s.http.ListenAndServe()\n}\n\n// Shutdown gracefully stops the server.\nfunc (s 
*Server) Shutdown(ctx context.Context) error {\n\tslog.Info(\"Graceful shutdown initiated\")\n\ts.scheduler.Stop()\n\ts.pipelineRunner.Stop()\n\ts.modelScheduler.Stop()\n\ts.govSyncer.Stop()\n\ts.alerts.Stop()\n\ts.langfuse.Stop()\n\ts.gateway.Stop()\n\treturn s.http.Shutdown(ctx)\n}\n\n// loadLangfuseConfigFromDB reads Langfuse configuration from the settings table.\nfunc loadLangfuseConfigFromDB(db *database.DB, appSecretKey string) (langfuse.Config, error) {\n\tvar cfg langfuse.Config\n\n\tpublicKey, err := db.GetSetting(\"langfuse.public_key\")\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tcfg.PublicKey = publicKey\n\n\tencryptedSecret, err := db.GetSetting(\"langfuse.secret_key\")\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tif encryptedSecret != \"\" {\n\t\tdecrypted, err := crypto.Decrypt(encryptedSecret, appSecretKey)\n\t\tif err != nil {\n\t\t\treturn cfg, fmt.Errorf(\"decrypt langfuse secret: %w\", err)\n\t\t}\n\t\tcfg.SecretKey = decrypted\n\t}\n\n\tbaseURL, err := db.GetSetting(\"langfuse.base_url\")\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tcfg.BaseURL = baseURL\n\tcfg.NormalizeBaseURL()\n\n\treturn cfg, nil\n}\n"
  },
  {
    "path": "internal/tunnel/api.go",
    "content": "package tunnel\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n\t\"github.com/gorilla/websocket\"\n)\n\n// IsTunnelOnline checks if a tunnel connection is currently active.\nfunc (g *Gateway) IsTunnelOnline(connectionID string) bool {\n\t_, ok := g.tunnels.Load(connectionID)\n\treturn ok\n}\n\n// GetTunnelStatus returns the online status and last seen time for a connection.\nfunc (g *Gateway) GetTunnelStatus(connectionID string) (online bool, lastSeen time.Time) {\n\tval, ok := g.tunnels.Load(connectionID)\n\tif !ok {\n\t\treturn false, time.Time{}\n\t}\n\tt := val.(*ConnectedTunnel)\n\treturn true, t.LastSeen\n}\n\n// GetConnectedCount returns the number of currently connected tunnels.\nfunc (g *Gateway) GetConnectedCount() int {\n\tcount := 0\n\tg.tunnels.Range(func(_, _ any) bool {\n\t\tcount++\n\t\treturn true\n\t})\n\treturn count\n}\n\n// ExecuteQuery sends a SQL query to the agent via the tunnel and waits for a result.\nfunc (g *Gateway) ExecuteQuery(connectionID, sql, user, password string, timeout time.Duration) (*QueryResult, error) {\n\tval, ok := g.tunnels.Load(connectionID)\n\tif !ok {\n\t\treturn nil, errors.New(\"tunnel not connected\")\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\trequestID := uuid.NewString()\n\tpending := &PendingRequest{\n\t\tResultCh: make(chan json.RawMessage, 1),\n\t\tErrorCh:  make(chan error, 1),\n\t}\n\tt.Pending.Store(requestID, pending)\n\tdefer t.Pending.Delete(requestID)\n\n\tmsg := GatewayMessage{\n\t\tType:     \"query\",\n\t\tID:       requestID,\n\t\tQueryID:  requestID,\n\t\tSQL:      sql,\n\t\tQuery:    sql,\n\t\tUser:     user,\n\t\tPassword: password,\n\t}\n\n\tdata, _ := json.Marshal(msg)\n\tt.mu.Lock()\n\terr := t.WS.WriteMessage(websocket.TextMessage, data)\n\tt.mu.Unlock()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tselect {\n\tcase payload := <-pending.ResultCh:\n\t\tvar result QueryResult\n\t\tif err := json.Unmarshal(payload, &result); err != 
nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &result, nil\n\tcase err := <-pending.ErrorCh:\n\t\treturn nil, err\n\tcase <-time.After(timeout):\n\t\t// Send cancel to agent\n\t\tcancel := GatewayMessage{\n\t\t\tType:    \"cancel_query\",\n\t\t\tID:      requestID,\n\t\t\tQueryID: requestID,\n\t\t}\n\t\tcancelData, _ := json.Marshal(cancel)\n\t\tt.mu.Lock()\n\t\tt.WS.WriteMessage(websocket.TextMessage, cancelData)\n\t\tt.mu.Unlock()\n\t\treturn nil, errors.New(\"query timeout\")\n\t}\n}\n\n// ExecuteQueryWithFormat sends a SQL query with a specific output format and returns the raw result.\n// The format parameter (e.g. \"JSONCompact\") is passed to the agent, which appends FORMAT <format> to the query.\n// Returns the raw ClickHouse response as-is (no intermediate parse/reserialize).\nfunc (g *Gateway) ExecuteQueryWithFormat(connectionID, sql, user, password, format string, timeout time.Duration) (json.RawMessage, error) {\n\tval, ok := g.tunnels.Load(connectionID)\n\tif !ok {\n\t\treturn nil, errors.New(\"tunnel not connected\")\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\trequestID := uuid.NewString()\n\tpending := &PendingRequest{\n\t\tResultCh: make(chan json.RawMessage, 1),\n\t\tErrorCh:  make(chan error, 1),\n\t}\n\tt.Pending.Store(requestID, pending)\n\tdefer t.Pending.Delete(requestID)\n\n\tmsg := GatewayMessage{\n\t\tType:     \"query\",\n\t\tID:       requestID,\n\t\tQueryID:  requestID,\n\t\tSQL:      sql,\n\t\tQuery:    sql,\n\t\tUser:     user,\n\t\tPassword: password,\n\t\tFormat:   format,\n\t}\n\n\tdata, _ := json.Marshal(msg)\n\tt.mu.Lock()\n\terr := t.WS.WriteMessage(websocket.TextMessage, data)\n\tt.mu.Unlock()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tselect {\n\tcase payload := <-pending.ResultCh:\n\t\t// The payload is a marshaled QueryResult{Data, Meta, Stats}.\n\t\t// For format-aware queries the agent puts the raw CH response in Data.\n\t\tvar result QueryResult\n\t\tif err := json.Unmarshal(payload, &result); err != nil 
{\n\t\t\treturn payload, nil // fallback: return as-is\n\t\t}\n\t\tif len(result.Data) > 0 {\n\t\t\treturn result.Data, nil\n\t\t}\n\t\treturn payload, nil\n\tcase err := <-pending.ErrorCh:\n\t\treturn nil, err\n\tcase <-time.After(timeout):\n\t\tcancel := GatewayMessage{\n\t\t\tType:    \"cancel_query\",\n\t\t\tID:      requestID,\n\t\t\tQueryID: requestID,\n\t\t}\n\t\tcancelData, _ := json.Marshal(cancel)\n\t\tt.mu.Lock()\n\t\tt.WS.WriteMessage(websocket.TextMessage, cancelData)\n\t\tt.mu.Unlock()\n\t\treturn nil, errors.New(\"query timeout\")\n\t}\n}\n\n// ExecuteStreamQuery sends a streaming query to the agent and returns channels for progressive consumption.\n// The caller must range over stream.ChunkCh, then select on stream.DoneCh/ErrorCh.\n// Call CleanupStream when done to release resources.\nfunc (g *Gateway) ExecuteStreamQuery(connectionID, sql, user, password string, settings map[string]string) (requestID string, stream *PendingStreamRequest, err error) {\n\tval, ok := g.tunnels.Load(connectionID)\n\tif !ok {\n\t\treturn \"\", nil, errors.New(\"tunnel not connected\")\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\trequestID = uuid.NewString()\n\tstream = &PendingStreamRequest{\n\t\tMetaCh:  make(chan json.RawMessage, 1),\n\t\tChunkCh: make(chan json.RawMessage, 8),\n\t\tDoneCh:  make(chan json.RawMessage, 1),\n\t\tErrorCh: make(chan error, 1),\n\t}\n\tt.Pending.Store(requestID, stream)\n\n\tmsg := GatewayMessage{\n\t\tType:     \"query_stream\",\n\t\tID:       requestID,\n\t\tQueryID:  requestID,\n\t\tSQL:      sql,\n\t\tQuery:    sql,\n\t\tUser:     user,\n\t\tPassword: password,\n\t\tSettings: settings,\n\t}\n\n\tdata, _ := json.Marshal(msg)\n\tt.mu.Lock()\n\twsErr := t.WS.WriteMessage(websocket.TextMessage, data)\n\tt.mu.Unlock()\n\tif wsErr != nil {\n\t\tt.Pending.Delete(requestID)\n\t\treturn \"\", nil, wsErr\n\t}\n\n\treturn requestID, stream, nil\n}\n\n// CleanupStream removes a pending stream request from the tunnel's pending map.\n// Call this when 
the HTTP handler finishes (completion, error, or client disconnect).\nfunc (g *Gateway) CleanupStream(connectionID, requestID string) {\n\tval, ok := g.tunnels.Load(connectionID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\tt.Pending.Delete(requestID)\n}\n\n// TestConnection tests a ClickHouse connection through the tunnel.\nfunc (g *Gateway) TestConnection(connectionID, user, password string, timeout time.Duration) (*TestResult, error) {\n\tval, ok := g.tunnels.Load(connectionID)\n\tif !ok {\n\t\treturn nil, errors.New(\"tunnel not connected\")\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\trequestID := uuid.NewString()\n\tpending := &PendingRequest{\n\t\tResultCh: make(chan json.RawMessage, 1),\n\t\tErrorCh:  make(chan error, 1),\n\t}\n\tt.Pending.Store(requestID, pending)\n\tdefer t.Pending.Delete(requestID)\n\n\tmsg := GatewayMessage{\n\t\tType:     \"test_connection\",\n\t\tID:       requestID,\n\t\tQueryID:  requestID,\n\t\tUser:     user,\n\t\tPassword: password,\n\t}\n\n\tdata, _ := json.Marshal(msg)\n\tt.mu.Lock()\n\terr := t.WS.WriteMessage(websocket.TextMessage, data)\n\tt.mu.Unlock()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tselect {\n\tcase payload := <-pending.ResultCh:\n\t\tvar result TestResult\n\t\tif err := json.Unmarshal(payload, &result); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn &result, nil\n\tcase err := <-pending.ErrorCh:\n\t\treturn nil, err\n\tcase <-time.After(timeout):\n\t\treturn nil, errors.New(\"connection test timeout\")\n\t}\n}\n"
  },
  {
    "path": "internal/tunnel/gateway.go",
    "content": "package tunnel\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"log/slog\"\n\t\"net\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/gorilla/websocket\"\n\n\t\"github.com/caioricciuti/ch-ui/internal/database\"\n)\n\nvar upgrader = websocket.Upgrader{\n\tCheckOrigin: func(r *http.Request) bool {\n\t\torigin := r.Header.Get(\"Origin\")\n\t\t// Non-browser clients (like ch-ui-agent) typically do not send Origin.\n\t\tif origin == \"\" {\n\t\t\treturn true\n\t\t}\n\n\t\toriginURL, err := url.Parse(origin)\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\n\t\toriginHost, originPort := splitHostPort(originURL.Host)\n\t\trequestHost, requestPort := splitHostPort(r.Host)\n\t\tif originHost == \"\" || requestHost == \"\" {\n\t\t\treturn false\n\t\t}\n\n\t\tsamePort := originPort == \"\" || requestPort == \"\" || originPort == requestPort\n\t\tif samePort && strings.EqualFold(originHost, requestHost) {\n\t\t\treturn true\n\t\t}\n\n\t\t// Local development friendliness: consider localhost and loopback IPs equivalent.\n\t\tif samePort && isLoopbackHost(originHost) && isLoopbackHost(requestHost) {\n\t\t\treturn true\n\t\t}\n\n\t\treturn false\n\t},\n\tEnableCompression: true,\n}\n\nfunc splitHostPort(hostport string) (string, string) {\n\thostport = strings.TrimSpace(hostport)\n\tif hostport == \"\" {\n\t\treturn \"\", \"\"\n\t}\n\n\thost, port, err := net.SplitHostPort(hostport)\n\tif err == nil {\n\t\treturn strings.Trim(host, \"[]\"), strings.TrimSpace(port)\n\t}\n\n\t// Host without explicit port.\n\treturn strings.Trim(hostport, \"[]\"), \"\"\n}\n\nfunc isLoopbackHost(host string) bool {\n\tif strings.EqualFold(host, \"localhost\") {\n\t\treturn true\n\t}\n\tip := net.ParseIP(host)\n\treturn ip != nil && ip.IsLoopback()\n}\n\n// PendingRequest represents a request waiting for a response from the agent.\ntype PendingRequest struct {\n\tResultCh chan json.RawMessage // receives the full response 
payload\n\tErrorCh  chan error\n}\n\n// PendingStreamRequest represents a streaming query waiting for chunked responses.\ntype PendingStreamRequest struct {\n\tMetaCh  chan json.RawMessage // receives query_stream_start meta\n\tChunkCh chan json.RawMessage // receives query_stream_chunk data (buffered)\n\tDoneCh  chan json.RawMessage // receives query_stream_end statistics\n\tErrorCh chan error\n}\n\n// ConnectedTunnel represents an active tunnel agent connection.\ntype ConnectedTunnel struct {\n\tConnectionID   string\n\tConnectionName string\n\tWS             *websocket.Conn\n\tLastSeen       time.Time\n\tPending        sync.Map // map[requestID]*PendingRequest\n\tmu             sync.Mutex\n}\n\n// Gateway manages WebSocket connections from tunnel agents.\ntype Gateway struct {\n\tdb      *database.DB\n\ttunnels sync.Map // map[connectionID]*ConnectedTunnel\n\tstopCh  chan struct{}\n}\n\n// NewGateway creates a new tunnel gateway.\nfunc NewGateway(db *database.DB) *Gateway {\n\tg := &Gateway{\n\t\tdb:     db,\n\t\tstopCh: make(chan struct{}),\n\t}\n\tgo g.heartbeatLoop()\n\tslog.Info(\"Tunnel gateway initialized\")\n\treturn g\n}\n\n// Stop stops the gateway heartbeat.\nfunc (g *Gateway) Stop() {\n\tclose(g.stopCh)\n}\n\n// HandleWebSocket handles the WebSocket upgrade and read loop for a tunnel agent.\nfunc (g *Gateway) HandleWebSocket(w http.ResponseWriter, r *http.Request) {\n\tconn, err := upgrader.Upgrade(w, r, nil)\n\tif err != nil {\n\t\tslog.Error(\"WebSocket upgrade failed\", \"error\", err)\n\t\treturn\n\t}\n\n\tslog.Debug(\"New tunnel WebSocket connection\")\n\n\t// Read loop\n\tgo g.readLoop(conn)\n}\n\nfunc (g *Gateway) readLoop(conn *websocket.Conn) {\n\tvar connID string // set after auth\n\n\tconn.SetPingHandler(func(appData string) error {\n\t\tif connID != \"\" {\n\t\t\tg.touchTunnel(connID)\n\t\t}\n\t\t// Keep Gorilla's default behavior: reply with a Pong control frame.\n\t\treturn conn.WriteControl(websocket.PongMessage, []byte(appData), 
time.Now().Add(5*time.Second))\n\t})\n\tconn.SetPongHandler(func(_ string) error {\n\t\tif connID != \"\" {\n\t\t\tg.touchTunnel(connID)\n\t\t}\n\t\treturn nil\n\t})\n\n\tdefer func() {\n\t\tif connID != \"\" {\n\t\t\tg.handleDisconnect(connID, conn)\n\t\t}\n\t\tconn.Close()\n\t}()\n\n\tfor {\n\t\t_, message, err := conn.ReadMessage()\n\t\tif err != nil {\n\t\t\tif websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway, websocket.CloseNormalClosure) {\n\t\t\t\tslog.Debug(\"Tunnel WebSocket read error\", \"error\", err)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\tvar msg AgentMessage\n\t\tif err := json.Unmarshal(message, &msg); err != nil {\n\t\t\tslog.Warn(\"Failed to parse tunnel message\", \"error\", err)\n\t\t\tconn.WriteMessage(websocket.CloseMessage,\n\t\t\t\twebsocket.FormatCloseMessage(websocket.ClosePolicyViolation, \"Invalid message format\"))\n\t\t\treturn\n\t\t}\n\n\t\t// Any valid message means the tunnel is alive.\n\t\tif connID != \"\" {\n\t\t\tg.touchTunnel(connID)\n\t\t}\n\n\t\tswitch msg.Type {\n\t\tcase \"auth\":\n\t\t\tauthConnID := g.handleAuth(conn, msg.Token, msg.Takeover)\n\t\t\tif authConnID == \"\" {\n\t\t\t\treturn // auth failed, connection closed\n\t\t\t}\n\t\t\tconnID = authConnID\n\n\t\tcase \"pong\":\n\t\t\tg.handlePong(connID)\n\n\t\tcase \"query_result\":\n\t\t\tg.handleQueryResult(connID, &msg)\n\n\t\tcase \"query_error\":\n\t\t\tg.handleQueryError(connID, &msg)\n\n\t\tcase \"test_result\":\n\t\t\tg.handleTestResult(connID, &msg)\n\n\t\tcase \"host_info\":\n\t\t\tg.handleHostInfo(connID, &msg)\n\n\t\tcase \"query_stream_start\":\n\t\t\tg.handleStreamStart(connID, &msg)\n\n\t\tcase \"query_stream_chunk\":\n\t\t\tg.handleStreamChunk(connID, &msg)\n\n\t\tcase \"query_stream_end\":\n\t\t\tg.handleStreamEnd(connID, &msg)\n\n\t\tcase \"query_stream_error\":\n\t\t\tg.handleStreamError(connID, &msg)\n\n\t\tdefault:\n\t\t\tslog.Warn(\"Unknown tunnel message type\", \"type\", msg.Type)\n\t\t}\n\t}\n}\n\nfunc (g *Gateway) handleAuth(conn 
*websocket.Conn, token string, takeover bool) string {\n\tremoteAddr := \"\"\n\tif conn != nil && conn.RemoteAddr() != nil {\n\t\tremoteAddr = conn.RemoteAddr().String()\n\t}\n\n\tauthCtx, cancel := context.WithTimeout(context.Background(), 3*time.Second)\n\tdefer cancel()\n\n\ttc, err := g.db.GetConnectionByTokenCtx(authCtx, token)\n\tif err != nil {\n\t\tslog.Warn(\"Tunnel auth failed: token lookup error\", \"remote_addr\", remoteAddr, \"error\", err)\n\t\tg.sendJSON(conn, GatewayMessage{Type: \"auth_error\", Message: \"Tunnel auth temporarily unavailable. Please retry.\"})\n\t\tconn.WriteMessage(websocket.CloseMessage,\n\t\t\twebsocket.FormatCloseMessage(websocket.CloseTryAgainLater, \"Auth backend busy\"))\n\t\treturn \"\"\n\t}\n\tif tc == nil {\n\t\tslog.Debug(\"Tunnel auth failed: invalid token\", \"remote_addr\", remoteAddr)\n\t\tg.sendJSON(conn, GatewayMessage{Type: \"auth_error\", Message: \"Invalid tunnel token\"})\n\t\tconn.WriteMessage(websocket.CloseMessage,\n\t\t\twebsocket.FormatCloseMessage(websocket.ClosePolicyViolation, \"Invalid token\"))\n\t\treturn \"\"\n\t}\n\n\t// If a healthy connection already exists for this token, reject duplicates.\n\t// This avoids two agent processes evicting each other in a reconnect loop.\n\tif existing, ok := g.tunnels.Load(tc.ID); ok {\n\t\tt := existing.(*ConnectedTunnel)\n\t\tisHealthy := time.Since(t.LastSeen) < 45*time.Second\n\t\tif isHealthy && !takeover {\n\t\t\tslog.Warn(\"Tunnel auth rejected: connection already active\", \"name\", tc.Name)\n\t\t\tg.sendJSON(conn, GatewayMessage{Type: \"auth_error\", Message: \"Tunnel token already connected (use --takeover to replace it)\"})\n\t\t\tconn.WriteMessage(websocket.CloseMessage,\n\t\t\t\twebsocket.FormatCloseMessage(websocket.ClosePolicyViolation, \"Token already connected\"))\n\t\t\treturn \"\"\n\t\t}\n\t\tif isHealthy && takeover {\n\t\t\tslog.Warn(\"Replacing active tunnel connection via takeover\", \"name\", tc.Name)\n\t\t}\n\n\t\t// Replace stale 
session.\n\t\tt.mu.Lock()\n\t\t_ = t.WS.Close()\n\t\tt.mu.Unlock()\n\t\tg.handleDisconnect(tc.ID, t.WS)\n\t\tslog.Warn(\"Replaced stale tunnel connection\", \"name\", tc.Name)\n\t}\n\n\ttunnel := &ConnectedTunnel{\n\t\tConnectionID:   tc.ID,\n\t\tConnectionName: tc.Name,\n\t\tWS:             conn,\n\t\tLastSeen:       time.Now(),\n\t}\n\tg.tunnels.Store(tc.ID, tunnel)\n\n\tg.db.UpdateConnectionStatus(tc.ID, \"connected\")\n\n\tg.sendJSON(conn, GatewayMessage{\n\t\tType:           \"auth_ok\",\n\t\tConnectionID:   tc.ID,\n\t\tConnectionName: tc.Name,\n\t})\n\n\tslog.Info(\"Tunnel agent authenticated\", \"name\", tc.Name, \"connection_id\", tc.ID)\n\n\tg.db.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"tunnel.connected\",\n\t\tConnectionID: strPtr(tc.ID),\n\t})\n\n\treturn tc.ID\n}\n\nfunc (g *Gateway) handlePong(connID string) {\n\tif connID == \"\" {\n\t\treturn\n\t}\n\tg.touchTunnel(connID)\n\tg.db.UpdateConnectionStatus(connID, \"connected\")\n}\n\nfunc (g *Gateway) touchTunnel(connID string) {\n\tif connID == \"\" {\n\t\treturn\n\t}\n\tif val, ok := g.tunnels.Load(connID); ok {\n\t\tt := val.(*ConnectedTunnel)\n\t\tt.LastSeen = time.Now()\n\t}\n}\n\nfunc (g *Gateway) handleQueryResult(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.LoadAndDelete(id)\n\tif !ok {\n\t\tslog.Warn(\"Query result for unknown request\", \"id\", id)\n\t\treturn\n\t}\n\tpending := pendingVal.(*PendingRequest)\n\n\t// Build the result payload\n\tresult := QueryResult{\n\t\tData:  msg.Data,\n\t\tMeta:  msg.Meta,\n\t\tStats: msg.GetStats(),\n\t}\n\tpayload, _ := json.Marshal(result)\n\n\tselect {\n\tcase pending.ResultCh <- payload:\n\tdefault:\n\t}\n}\n\nfunc (g *Gateway) handleQueryError(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || 
id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.LoadAndDelete(id)\n\tif !ok {\n\t\treturn\n\t}\n\tpending := pendingVal.(*PendingRequest)\n\n\tselect {\n\tcase pending.ErrorCh <- errors.New(msg.Error):\n\tdefault:\n\t}\n}\n\nfunc (g *Gateway) handleTestResult(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.LoadAndDelete(id)\n\tif !ok {\n\t\treturn\n\t}\n\tpending := pendingVal.(*PendingRequest)\n\n\tif msg.IsTestSuccess() {\n\t\tresult := TestResult{Success: true, Version: msg.Version}\n\t\tpayload, _ := json.Marshal(result)\n\t\tselect {\n\t\tcase pending.ResultCh <- payload:\n\t\tdefault:\n\t\t}\n\t} else {\n\t\terrMsg := msg.Error\n\t\tif errMsg == \"\" {\n\t\t\terrMsg = \"Connection test failed\"\n\t\t}\n\t\tselect {\n\t\tcase pending.ErrorCh <- errors.New(errMsg):\n\t\tdefault:\n\t\t}\n\t}\n}\n\nfunc (g *Gateway) handleHostInfo(connID string, msg *AgentMessage) {\n\tif connID == \"\" || len(msg.HostInfo) == 0 {\n\t\treturn\n\t}\n\n\tvar info database.HostInfo\n\tif err := json.Unmarshal(msg.HostInfo, &info); err != nil {\n\t\tslog.Warn(\"Failed to parse host info\", \"error\", err)\n\t\treturn\n\t}\n\n\tg.db.UpdateConnectionHostInfo(connID, info)\n\tslog.Debug(\"Host info received\", \"connection\", connID, \"hostname\", info.Hostname)\n}\n\nfunc (g *Gateway) handleStreamStart(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.Load(id)\n\tif !ok {\n\t\tslog.Warn(\"Stream start for unknown request\", \"id\", id)\n\t\treturn\n\t}\n\tpending, ok := 
pendingVal.(*PendingStreamRequest)\n\tif !ok {\n\t\treturn\n\t}\n\n\tselect {\n\tcase pending.MetaCh <- msg.Meta:\n\tdefault:\n\t}\n}\n\nfunc (g *Gateway) handleStreamChunk(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.Load(id)\n\tif !ok {\n\t\treturn\n\t}\n\tpending, ok := pendingVal.(*PendingStreamRequest)\n\tif !ok {\n\t\treturn\n\t}\n\n\tpending.ChunkCh <- msg.Data // backpressure: blocks if consumer is slow\n}\n\nfunc (g *Gateway) handleStreamEnd(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.LoadAndDelete(id)\n\tif !ok {\n\t\treturn\n\t}\n\tpending, ok := pendingVal.(*PendingStreamRequest)\n\tif !ok {\n\t\treturn\n\t}\n\n\tdone := StreamDone{\n\t\tStatistics: msg.GetStats(),\n\t\tTotalRows:  msg.TotalRows,\n\t}\n\tpayload, _ := json.Marshal(done)\n\n\tclose(pending.ChunkCh)\n\tselect {\n\tcase pending.DoneCh <- payload:\n\tdefault:\n\t}\n}\n\nfunc (g *Gateway) handleStreamError(connID string, msg *AgentMessage) {\n\tid := msg.GetMessageID()\n\tif connID == \"\" || id == \"\" {\n\t\treturn\n\t}\n\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\n\tpendingVal, ok := t.Pending.LoadAndDelete(id)\n\tif !ok {\n\t\treturn\n\t}\n\tpending, ok := pendingVal.(*PendingStreamRequest)\n\tif !ok {\n\t\treturn\n\t}\n\n\tclose(pending.ChunkCh)\n\tselect {\n\tcase pending.ErrorCh <- errors.New(msg.Error):\n\tdefault:\n\t}\n}\n\nfunc (g *Gateway) handleDisconnect(connID string, ws *websocket.Conn) {\n\tval, ok := g.tunnels.Load(connID)\n\tif !ok {\n\t\treturn\n\t}\n\tt := val.(*ConnectedTunnel)\n\tif ws != nil && t.WS != ws 
{\n\t\t// A newer connection already replaced this one.\n\t\treturn\n\t}\n\tif !g.tunnels.CompareAndDelete(connID, t) {\n\t\treturn\n\t}\n\n\t// Reject all pending requests\n\tt.Pending.Range(func(key, value any) bool {\n\t\tswitch p := value.(type) {\n\t\tcase *PendingRequest:\n\t\t\tselect {\n\t\t\tcase p.ErrorCh <- errors.New(\"tunnel disconnected\"):\n\t\t\tdefault:\n\t\t\t}\n\t\tcase *PendingStreamRequest:\n\t\t\tclose(p.ChunkCh)\n\t\t\tselect {\n\t\t\tcase p.ErrorCh <- errors.New(\"tunnel disconnected\"):\n\t\t\tdefault:\n\t\t\t}\n\t\t}\n\t\tt.Pending.Delete(key)\n\t\treturn true\n\t})\n\n\tg.db.UpdateConnectionStatus(connID, \"disconnected\")\n\n\tslog.Info(\"Tunnel disconnected\", \"name\", t.ConnectionName, \"connection_id\", connID)\n\n\tg.db.CreateAuditLog(database.AuditLogParams{\n\t\tAction:       \"tunnel.disconnected\",\n\t\tConnectionID: strPtr(connID),\n\t})\n}\n\nfunc strPtr(s string) *string { return &s }\n\nfunc (g *Gateway) sendJSON(conn *websocket.Conn, msg GatewayMessage) {\n\tdata, _ := json.Marshal(msg)\n\tconn.WriteMessage(websocket.TextMessage, data)\n}\n\n// heartbeatLoop pings all connected agents every 30 seconds.\nfunc (g *Gateway) heartbeatLoop() {\n\tticker := time.NewTicker(30 * time.Second)\n\tdefer ticker.Stop()\n\n\tfor {\n\t\tselect {\n\t\tcase <-g.stopCh:\n\t\t\treturn\n\t\tcase <-ticker.C:\n\t\t\tg.pingAll()\n\t\t}\n\t}\n}\n\nfunc (g *Gateway) pingAll() {\n\tnow := time.Now()\n\tstaleThreshold := 3 * time.Minute\n\n\tg.tunnels.Range(func(key, value any) bool {\n\t\tconnID := key.(string)\n\t\tt := value.(*ConnectedTunnel)\n\n\t\tif now.Sub(t.LastSeen) > staleThreshold {\n\t\t\tslog.Warn(\"Tunnel connection stale, removing\", \"name\", t.ConnectionName, \"lastSeen\", t.LastSeen)\n\t\t\tt.mu.Lock()\n\t\t\tt.WS.Close()\n\t\t\tt.mu.Unlock()\n\t\t\tg.handleDisconnect(connID, t.WS)\n\t\t\treturn true\n\t\t}\n\n\t\tping := GatewayMessage{Type: \"ping\"}\n\t\tdata, _ := json.Marshal(ping)\n\t\tt.mu.Lock()\n\t\terr := 
t.WS.WriteMessage(websocket.TextMessage, data)\n\t\tt.mu.Unlock()\n\t\tif err != nil {\n\t\t\tslog.Warn(\"Ping failed\", \"name\", t.ConnectionName, \"error\", err)\n\t\t\tg.handleDisconnect(connID, t.WS)\n\t\t}\n\t\treturn true\n\t})\n}\n"
  },
  {
    "path": "internal/tunnel/protocol.go",
    "content": "package tunnel\n\nimport \"encoding/json\"\n\n// AgentMessage represents messages from the tunnel agent to the gateway.\ntype AgentMessage struct {\n\tType       string          `json:\"type\"`\n\tToken      string          `json:\"token,omitempty\"`      // auth\n\tTakeover   bool            `json:\"takeover,omitempty\"`   // auth (replace active session)\n\tID         string          `json:\"id,omitempty\"`         // legacy JS agent\n\tQueryID    string          `json:\"query_id,omitempty\"`   // Go agent\n\tData       json.RawMessage `json:\"data,omitempty\"`       // query_result, query_stream_chunk\n\tMeta       json.RawMessage `json:\"meta,omitempty\"`       // query_result, query_stream_start\n\tStats      json.RawMessage `json:\"stats,omitempty\"`      // query_result (legacy)\n\tStatistics json.RawMessage `json:\"statistics,omitempty\"` // query_result (Go agent), query_stream_end\n\tError      string          `json:\"error,omitempty\"`      // query_error, query_stream_error\n\tSuccess    *bool           `json:\"success,omitempty\"`    // test_result (legacy)\n\tOnline     *bool           `json:\"online,omitempty\"`     // test_result (Go agent)\n\tVersion    string          `json:\"version,omitempty\"`    // test_result\n\tHostInfo   json.RawMessage `json:\"host_info,omitempty\"`  // host_info\n\tSeq        int             `json:\"seq,omitempty\"`        // query_stream_chunk sequence number\n\tTotalRows  int64           `json:\"total_rows,omitempty\"` // query_stream_end total row count\n}\n\n// GetMessageID returns the message ID from either legacy or Go agent format.\nfunc (m *AgentMessage) GetMessageID() string {\n\tif m.QueryID != \"\" {\n\t\treturn m.QueryID\n\t}\n\treturn m.ID\n}\n\n// GetStats returns stats from either legacy or Go agent format.\nfunc (m *AgentMessage) GetStats() json.RawMessage {\n\tif len(m.Stats) > 0 {\n\t\treturn m.Stats\n\t}\n\treturn m.Statistics\n}\n\n// IsTestSuccess normalizes the test result between 
legacy and Go agent.\nfunc (m *AgentMessage) IsTestSuccess() bool {\n\tif m.Success != nil {\n\t\treturn *m.Success\n\t}\n\tif m.Online != nil {\n\t\treturn *m.Online\n\t}\n\treturn false\n}\n\n// GatewayMessage represents messages from the gateway to the tunnel agent.\ntype GatewayMessage struct {\n\tType           string `json:\"type\"`\n\tConnectionID   string `json:\"connectionId,omitempty\"`   // auth_ok\n\tConnectionName string `json:\"connectionName,omitempty\"` // auth_ok\n\tMessage        string `json:\"message,omitempty\"`        // auth_error\n\tID             string `json:\"id,omitempty\"`             // legacy JS agent\n\tQueryID        string `json:\"query_id,omitempty\"`       // Go agent\n\tSQL            string `json:\"sql,omitempty\"`            // query (legacy)\n\tQuery          string `json:\"query,omitempty\"`          // query (Go agent)\n\tUser           string            `json:\"user,omitempty\"`           // query, test\n\tPassword       string            `json:\"password,omitempty\"`       // query, test\n\tFormat         string            `json:\"format,omitempty\"`         // query\n\tSettings       map[string]string `json:\"settings,omitempty\"`       // ClickHouse query settings (URL params)\n}\n\n// QueryResult represents a ClickHouse query result returned from the agent.\ntype QueryResult struct {\n\tData  json.RawMessage `json:\"data\"`\n\tMeta  json.RawMessage `json:\"meta\"`\n\tStats json.RawMessage `json:\"stats\"`\n}\n\n// TestResult represents a connection test result returned from the agent.\ntype TestResult struct {\n\tSuccess bool   `json:\"success\"`\n\tError   string `json:\"error,omitempty\"`\n\tVersion string `json:\"version,omitempty\"`\n}\n\n// StreamDone represents the final payload of a streaming query.\ntype StreamDone struct {\n\tStatistics json.RawMessage `json:\"statistics\"`\n\tTotalRows  int64          `json:\"total_rows\"`\n}\n\n// HostInfo represents machine info from the agent.\ntype HostInfo struct 
{\n\tHostname    string  `json:\"hostname\"`\n\tOS          string  `json:\"os\"`\n\tArch        string  `json:\"arch\"`\n\tCPUCores    int     `json:\"cpu_cores\"`\n\tMemoryTotal int64   `json:\"memory_total\"`\n\tMemoryFree  int64   `json:\"memory_free\"`\n\tDiskTotal   int64   `json:\"disk_total\"`\n\tDiskFree    int64   `json:\"disk_free\"`\n\tGoVersion   string  `json:\"go_version\"`\n\tAgentUptime float64 `json:\"agent_uptime\"`\n\tCollectedAt string  `json:\"collected_at\"`\n}\n"
  },
  {
    "path": "internal/version/version.go",
    "content": "package version\n\nvar (\n\tVersion   = \"dev\"\n\tCommit    = \"none\"\n\tBuildDate = \"unknown\"\n)\n\nfunc Set(v, c, d string) {\n\tVersion = v\n\tCommit = c\n\tBuildDate = d\n}\n"
  },
  {
    "path": "license/public.pem",
    "content": "-----BEGIN PUBLIC KEY-----\nMCowBQYDK2VwAyEA62CBTMWey4wS4Fknr/5Sfk7k1J7+4MYpBfxBPvKXRFg=\n-----END PUBLIC KEY-----\n"
  },
  {
    "path": "main.go",
    "content": "package main\n\nimport (\n\t\"github.com/caioricciuti/ch-ui/cmd\"\n\t\"github.com/caioricciuti/ch-ui/internal/version\"\n)\n\nvar (\n\tVersion   = \"dev\"\n\tCommit    = \"none\"\n\tBuildDate = \"unknown\"\n)\n\nfunc main() {\n\tversion.Set(Version, Commit, BuildDate)\n\tcmd.FrontendFS = frontendFS()\n\tcmd.Execute()\n}\n"
  },
  {
    "path": "ui/.gitignore",
    "content": "# Logs\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\npnpm-debug.log*\nlerna-debug.log*\n\nnode_modules\ndist\ndist-ssr\n*.local\n\n# Editor directories and files\n.vscode/*\n!.vscode/extensions.json\n.idea\n.DS_Store\n*.suo\n*.ntvs*\n*.njsproj\n*.sln\n*.sw?\n"
  },
  {
    "path": "ui/README.md",
    "content": "# Svelte + TS + Vite\n\nThis template should help get you started developing with Svelte and TypeScript in Vite.\n\n## Recommended IDE Setup\n\n[VS Code](https://code.visualstudio.com/) + [Svelte](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode).\n\n## Need an official Svelte framework?\n\nCheck out [SvelteKit](https://github.com/sveltejs/kit#readme), which is also powered by Vite. Deploy anywhere with its serverless-first approach and adapt to various platforms, with out of the box support for TypeScript, SCSS, and Less, and easily-added support for mdsvex, GraphQL, PostCSS, Tailwind CSS, and more.\n\n## Technical considerations\n\n**Why use this over SvelteKit?**\n\n- It brings its own routing solution which might not be preferable for some users.\n- It is first and foremost a framework that just happens to use Vite under the hood, not a Vite app.\n\nThis template contains as little as possible to get started with Vite + TypeScript + Svelte, while taking into account the developer experience with regards to HMR and intellisense. It demonstrates capabilities on par with the other `create-vite` templates and is a good starting point for beginners dipping their toes into a Vite + Svelte project.\n\nShould you later need the extended capabilities and extensibility provided by SvelteKit, the template has been structured similarly to SvelteKit so that it is easy to migrate.\n\n**Why `global.d.ts` instead of `compilerOptions.types` inside `jsconfig.json` or `tsconfig.json`?**\n\nSetting `compilerOptions.types` shuts out all other types not explicitly listed in the configuration. 
Using triple-slash references keeps the default TypeScript setting of accepting type information from the entire workspace, while also adding `svelte` and `vite/client` type information.\n\n**Why include `.vscode/extensions.json`?**\n\nOther templates indirectly recommend extensions via the README, but this file allows VS Code to prompt the user to install the recommended extension upon opening the project.\n\n**Why enable `allowJs` in the TS template?**\n\nWhile `allowJs: false` would indeed prevent the use of `.js` files in the project, it does not prevent the use of JavaScript syntax in `.svelte` files. In addition, it would force `checkJs: false`, bringing the worst of both worlds: not being able to guarantee the entire codebase is TypeScript, and also having worse typechecking for the existing JavaScript. In addition, there are valid use cases in which a mixed codebase may be relevant.\n\n**Why is HMR not preserving my local component state?**\n\nHMR state preservation comes with a number of gotchas! It has been disabled by default in both `svelte-hmr` and `@sveltejs/vite-plugin-svelte` due to its often surprising behavior. You can read the details [here](https://github.com/rixo/svelte-hmr#svelte-hmr).\n\nIf you have state that's important to retain within a component, consider creating an external store which would not be replaced by HMR.\n\n```ts\n// store.ts\n// An extremely simple external store\nimport { writable } from 'svelte/store'\nexport default writable(0)\n```\n"
  },
  {
    "path": "ui/index.html",
    "content": "<!doctype html>\n<html lang=\"en\" class=\"h-full\">\n  <head>\n    <meta charset=\"UTF-8\" />\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n    <title>CH-UI</title>\n    <script>\n      const savedTheme = localStorage.getItem('ch-ui-theme') || 'dark'\n      if (savedTheme === 'dark') document.documentElement.classList.add('dark')\n    </script>\n  </head>\n  <body class=\"h-full text-gray-900 dark:text-gray-100 antialiased\">\n    <div id=\"app\" class=\"h-full\"></div>\n    <script type=\"module\" src=\"/src/main.ts\"></script>\n  </body>\n</html>\n"
  },
  {
    "path": "ui/package.json",
    "content": "{\n  \"name\": \"ui\",\n  \"private\": true,\n  \"version\": \"0.0.0\",\n  \"type\": \"module\",\n  \"scripts\": {\n    \"dev\": \"vite\",\n    \"build\": \"vite build\",\n    \"preview\": \"vite preview\",\n    \"check\": \"svelte-check --tsconfig ./tsconfig.app.json && tsc -p tsconfig.node.json\",\n    \"test\": \"vitest run --config vitest.config.ts\"\n  },\n  \"devDependencies\": {\n    \"@sveltejs/vite-plugin-svelte\": \"^6.2.4\",\n    \"@tsconfig/svelte\": \"^5.0.8\",\n    \"@types/json-bigint\": \"^1.0.4\",\n    \"@types/node\": \"^24.10.13\",\n    \"json-bigint\": \"^1.0.0\",\n    \"svelte\": \"^5.53.2\",\n    \"svelte-check\": \"^4.4.3\",\n    \"typescript\": \"~5.9.3\",\n    \"vite\": \"^7.3.1\",\n    \"vitest\": \"^2.1.9\"\n  },\n  \"dependencies\": {\n    \"@codemirror/autocomplete\": \"^6.20.0\",\n    \"@codemirror/commands\": \"^6.10.2\",\n    \"@codemirror/lang-sql\": \"^6.10.0\",\n    \"@codemirror/language\": \"^6.12.1\",\n    \"@codemirror/search\": \"^6.6.0\",\n    \"@codemirror/state\": \"^6.5.4\",\n    \"@codemirror/theme-one-dark\": \"^6.1.3\",\n    \"@codemirror/view\": \"^6.39.15\",\n    \"@tailwindcss/vite\": \"^4.2.0\",\n    \"@xyflow/svelte\": \"^1.5.1\",\n    \"lucide-svelte\": \"^0.563.0\",\n    \"marked\": \"^15.0.7\",\n    \"svelte-sonner\": \"^1.0.7\",\n    \"tailwindcss\": \"^4.2.0\",\n    \"uplot\": \"^1.6.32\"\n  },\n  \"overrides\": {\n    \"@codemirror/state\": \"$@codemirror/state\",\n    \"@codemirror/view\": \"$@codemirror/view\"\n  }\n}\n"
  },
  {
    "path": "ui/src/App.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import { initSession, isAuthenticated, isLoading } from './lib/stores/session.svelte'\n  import { initRouter } from './lib/stores/router.svelte'\n\n  import Shell from './lib/components/layout/Shell.svelte'\n  import Toast from './lib/components/common/Toast.svelte'\n  import Spinner from './lib/components/common/Spinner.svelte'\n  import Login from './pages/Login.svelte'\n  import logo from './assets/logo.png'\n\n  onMount(async () => {\n    await initSession()\n    initRouter()\n  })\n\n  const authenticated = $derived(isAuthenticated())\n  const loading = $derived(isLoading())\n</script>\n\n<Toast />\n\n{#if loading}\n  <div class=\"flex flex-col items-center justify-center h-full gap-3\">\n    <img src={logo} alt=\"CH-UI\" class=\"w-12 h-12 rounded-xl ring-1 ring-white/20\" />\n    <Spinner size=\"lg\" />\n    <p class=\"text-xs text-gray-500 dark:text-gray-400\">Loading CH-UI workspace...</p>\n  </div>\n{:else if !authenticated}\n  <Login />\n{:else}\n  <Shell />\n{/if}\n"
  },
  {
    "path": "ui/src/app.css",
    "content": "@import \"tailwindcss\";\n\n@custom-variant dark (&:where(.dark, .dark *));\n\n@theme {\n  --font-sans: \"Inter\", \"SF Pro Text\", -apple-system, BlinkMacSystemFont, sans-serif;\n  --font-mono: \"JetBrains Mono\", \"Fira Code\", ui-monospace, monospace;\n\n  /* Compatibility token still referenced in components */\n  --color-ch-blue: #f97316;\n  --color-ch-orange: #f97316;\n  --color-ch-green: #10b981;\n\n  /* Neutral grayscale (no blue cast) */\n  --color-gray-50: #f7f7f8;\n  --color-gray-100: #efeff1;\n  --color-gray-200: #dfdfe2;\n  --color-gray-300: #c8c9ce;\n  --color-gray-400: #a2a5ad;\n  --color-gray-500: #7a7e88;\n  --color-gray-600: #5f646d;\n  --color-gray-700: #494d55;\n  --color-gray-800: #33363d;\n  --color-gray-900: #1f2126;\n  --color-gray-950: #121316;\n}\n\n@theme inline {\n  --radius-sm: calc(var(--radius) - 4px);\n  --radius-md: calc(var(--radius) - 2px);\n  --radius-lg: var(--radius);\n  --radius-xl: calc(var(--radius) + 4px);\n\n  --color-background: var(--background);\n  --color-foreground: var(--foreground);\n  --color-card: var(--card);\n  --color-card-foreground: var(--card-foreground);\n  --color-popover: var(--popover);\n  --color-popover-foreground: var(--popover-foreground);\n  --color-primary: var(--primary);\n  --color-primary-foreground: var(--primary-foreground);\n  --color-secondary: var(--secondary);\n  --color-secondary-foreground: var(--secondary-foreground);\n  --color-muted: var(--muted);\n  --color-muted-foreground: var(--muted-foreground);\n  --color-accent: var(--accent);\n  --color-accent-foreground: var(--accent-foreground);\n  --color-destructive: var(--destructive);\n  --color-border: var(--border);\n  --color-input: var(--input);\n  --color-ring: var(--ring);\n}\n\n:root {\n  --radius: 0.425rem;\n  --background: #f7f7f8;\n  --foreground: #1b1d22;\n  --card: #ffffff;\n  --card-foreground: #1b1d22;\n  --popover: #ffffff;\n  --popover-foreground: #1b1d22;\n  --primary: #f97316;\n  --primary-foreground: 
#fffaf5;\n  --secondary: #efeff1;\n  --secondary-foreground: #2a2d33;\n  --muted: #efeff1;\n  --muted-foreground: #626773;\n  --accent: #efeff1;\n  --accent-foreground: #2a2d33;\n  --destructive: #dc2626;\n  --border: #d6d7dc;\n  --input: #d6d7dc;\n  --ring: #fb923c;\n}\n\n.dark {\n  --background: #131418;\n  --foreground: #f3f4f6;\n  --card: #1b1d22;\n  --card-foreground: #f3f4f6;\n  --popover: #1b1d22;\n  --popover-foreground: #f3f4f6;\n  --primary: #fb923c;\n  --primary-foreground: #1a1613;\n  --secondary: #282b32;\n  --secondary-foreground: #f3f4f6;\n  --muted: #282b32;\n  --muted-foreground: #a4a8b2;\n  --accent: #282b32;\n  --accent-foreground: #f3f4f6;\n  --destructive: #ef4444;\n  --border: rgba(255, 255, 255, 0.11);\n  --input: rgba(255, 255, 255, 0.14);\n  --ring: #fdba74;\n}\n\nhtml,\nbody {\n  overflow: hidden;\n}\n\n* {\n  border-color: var(--border);\n  outline-color: color-mix(in oklab, var(--ring), transparent 50%);\n  scrollbar-width: thin;\n  scrollbar-color: rgba(123, 123, 123, 0.45) transparent;\n}\n\nbody {\n  background: var(--background);\n  color: var(--foreground);\n  font-family: var(--font-sans);\n  font-weight: 450;\n  -webkit-font-smoothing: antialiased;\n  text-rendering: optimizeLegibility;\n}\n\nh1,\nh2,\nh3 {\n  letter-spacing: -0.012em;\n}\n\ncode,\npre,\n.font-mono {\n  font-family: var(--font-mono);\n}\n\n.surface-card {\n  background: color-mix(in oklab, var(--card), transparent 1%);\n  border: 1px solid color-mix(in oklab, var(--border), transparent 8%);\n  box-shadow: 0 8px 24px rgba(10, 10, 10, 0.18);\n}\n\n.dark .surface-card {\n  box-shadow: 0 10px 28px rgba(0, 0, 0, 0.34);\n}\n\n.brand-pill {\n  background: color-mix(in oklab, #f97316 16%, transparent);\n  border: 1px solid color-mix(in oklab, #f97316 38%, transparent);\n  color: #f97316;\n}\n\n.dark .brand-pill {\n  background: color-mix(in oklab, #fb923c 18%, transparent);\n  border-color: color-mix(in oklab, #fb923c 40%, transparent);\n  color: 
#fdba74;\n}\n\n::-webkit-scrollbar {\n  width: 8px;\n  height: 8px;\n}\n\n::-webkit-scrollbar-track {\n  background: transparent;\n}\n\n::-webkit-scrollbar-thumb {\n  background-color: rgba(123, 123, 123, 0.45);\n  border-radius: 10px;\n}\n\n::-webkit-scrollbar-thumb:hover {\n  background-color: rgba(123, 123, 123, 0.62);\n}\n\n@layer components {\n  .ds-page-header {\n    @apply flex items-center justify-between px-4 py-3 border-b border-gray-200 dark:border-gray-800;\n  }\n\n  .ds-page-title {\n    @apply text-xl font-semibold text-gray-900 dark:text-gray-100;\n  }\n\n  .ds-page-subtitle {\n    @apply text-xs text-gray-500 dark:text-gray-400;\n  }\n\n  .ds-tabs {\n    @apply flex gap-1 px-4 pt-2 border-b border-gray-200 dark:border-gray-800;\n  }\n\n  .ds-tab {\n    @apply px-3.5 py-2.5 text-[13px] font-semibold rounded-t transition-colors text-gray-500 hover:text-gray-700 dark:hover:text-gray-300;\n  }\n\n  .ds-tab-active {\n    @apply text-ch-blue border-b-2 border-ch-blue bg-orange-50 dark:bg-orange-950/20;\n  }\n\n  .ds-card {\n    @apply rounded-lg border border-gray-200 dark:border-gray-800 bg-transparent;\n  }\n\n  .ds-panel {\n    @apply rounded-lg border border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900;\n    box-shadow: 0 8px 24px rgba(10, 10, 10, 0.18);\n  }\n\n  .dark .ds-panel {\n    box-shadow: 0 10px 28px rgba(0, 0, 0, 0.34);\n  }\n\n  .ds-panel-muted {\n    @apply rounded-lg border border-gray-200 dark:border-gray-800 bg-gray-100/70 dark:bg-gray-900/70;\n  }\n\n  .ds-stat-card {\n    @apply rounded-lg border border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900 p-4;\n  }\n\n  .ds-form-label {\n    @apply block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1;\n  }\n\n  .ds-input {\n    @apply w-full rounded border border-gray-300 dark:border-gray-700 bg-transparent px-3 py-2 text-[14px] text-gray-700 dark:text-gray-300;\n    @apply focus:outline-none focus:ring-2 focus:ring-ch-blue/40 focus:border-ch-blue;\n  
}\n\n  .ds-input-sm {\n    @apply w-full rounded border border-gray-300 dark:border-gray-700 bg-transparent px-2.5 py-1.5 text-[13px] text-gray-700 dark:text-gray-300;\n    @apply focus:outline-none focus:ring-2 focus:ring-ch-blue/40 focus:border-ch-blue;\n  }\n\n  .ds-select {\n    @apply rounded border border-gray-300 dark:border-gray-700 bg-transparent px-2.5 py-1.5 text-[13px] text-gray-700 dark:text-gray-300;\n    @apply focus:outline-none focus:ring-2 focus:ring-ch-blue/40 focus:border-ch-blue;\n  }\n\n  .ds-textarea {\n    @apply w-full rounded border border-gray-300 dark:border-gray-700 bg-transparent px-3 py-2 text-sm text-gray-700 dark:text-gray-300;\n    @apply focus:outline-none focus:ring-2 focus:ring-ch-blue/40 focus:border-ch-blue;\n  }\n\n  .ds-btn-primary {\n    @apply inline-flex items-center justify-center gap-1.5 rounded px-3 py-1.5 text-[13px] font-medium text-white bg-ch-blue border border-orange-500 transition-colors;\n    @apply hover:bg-orange-600;\n  }\n\n  .ds-btn-ghost {\n    @apply inline-flex items-center justify-center gap-1 rounded p-1.5 text-gray-500 transition-colors;\n    @apply hover:text-ch-blue hover:bg-gray-200 dark:hover:bg-gray-800;\n  }\n\n  .ds-btn-outline {\n    @apply inline-flex items-center justify-center gap-1 rounded border border-gray-300 dark:border-gray-700 px-2.5 py-1.5 text-[13px] font-medium text-gray-700 dark:text-gray-300 transition-colors;\n    @apply hover:border-ch-blue hover:text-ch-blue;\n  }\n\n  .ds-icon-btn {\n    @apply inline-flex items-center justify-center rounded p-1.5 text-gray-500 transition-colors;\n    @apply hover:text-ch-blue hover:bg-gray-200 dark:hover:bg-gray-800;\n  }\n\n  .ds-segment {\n    @apply inline-flex rounded-lg border border-gray-300/80 dark:border-gray-700/80 bg-gray-100/70 dark:bg-gray-900/65 p-1;\n  }\n\n  .ds-segment-btn {\n    @apply px-2.5 h-7 rounded-md text-xs transition-colors text-gray-500 hover:text-gray-700 dark:hover:text-gray-300;\n  }\n\n  .ds-segment-btn-active 
{\n    @apply bg-orange-100 dark:bg-orange-500/15 text-ch-orange;\n  }\n\n  .ds-table-wrap {\n    @apply overflow-x-auto;\n  }\n\n  .ds-table {\n    @apply w-full text-sm;\n  }\n\n  .ds-table-head-row {\n    @apply border-b border-gray-200 dark:border-gray-800;\n  }\n\n  .ds-table-th {\n    @apply text-left py-2 px-3 text-gray-500 font-medium;\n  }\n\n  .ds-table-th-right {\n    @apply text-right py-2 px-3 text-gray-500 font-medium;\n  }\n\n  .ds-table-th-compact {\n    @apply text-left py-1.5 px-2 text-[11px] text-gray-500 font-medium;\n  }\n\n  .ds-table-th-right-compact {\n    @apply text-right py-1.5 px-2 text-[11px] text-gray-500 font-medium;\n  }\n\n  .ds-table-row {\n    @apply border-b border-gray-100 dark:border-gray-900 hover:bg-gray-50 dark:hover:bg-gray-900/50 transition-colors;\n  }\n\n  .ds-table-row-static {\n    @apply border-b border-gray-100 dark:border-gray-900;\n  }\n\n  .ds-td {\n    @apply py-2 px-3 text-gray-500 dark:text-gray-400;\n  }\n\n  .ds-td-strong {\n    @apply py-2 px-3 text-gray-800 dark:text-gray-200 font-medium;\n  }\n\n  .ds-td-right {\n    @apply py-2 px-3 text-right text-gray-500 dark:text-gray-400;\n  }\n\n  .ds-td-mono {\n    @apply py-2 px-3 text-xs text-gray-500 font-mono;\n  }\n\n  .ds-td-compact {\n    @apply py-1.5 px-2 text-xs text-gray-500 dark:text-gray-400;\n  }\n\n  .ds-td-compact-strong {\n    @apply py-1.5 px-2 text-xs text-gray-800 dark:text-gray-200 font-medium;\n  }\n\n  .ds-badge {\n    @apply inline-flex items-center px-1.5 py-0.5 rounded text-[11px] font-medium;\n  }\n\n  .ds-badge-neutral {\n    @apply bg-gray-200 dark:bg-gray-800 text-gray-700 dark:text-gray-300;\n  }\n\n  .ds-badge-success {\n    @apply bg-green-100 dark:bg-green-900/30 text-green-700 dark:text-green-300;\n  }\n\n  .ds-badge-danger {\n    @apply bg-red-100 dark:bg-red-900/30 text-red-700 dark:text-red-300;\n  }\n\n  .ds-badge-warn {\n    @apply bg-yellow-100 dark:bg-yellow-900/30 text-yellow-800 dark:text-yellow-300;\n  }\n\n  
.ds-badge-brand {\n    @apply border border-orange-200 dark:border-orange-700/60 bg-orange-100 dark:bg-orange-500/15 text-orange-900 dark:text-orange-200;\n  }\n\n  .ds-empty {\n    @apply text-center py-10 rounded-lg border border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900;\n  }\n\n  .ds-checkbox {\n    @apply h-4 w-4 rounded border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-900 accent-ch-blue;\n    @apply focus:outline-none focus:ring-2 focus:ring-ch-blue/35 focus:ring-offset-0;\n  }\n\n  .ds-checkbox-sm {\n    @apply h-3.5 w-3.5;\n  }\n\n  .ds-checkbox-label {\n    @apply inline-flex items-center gap-2 text-sm text-gray-700 dark:text-gray-300 select-none;\n  }\n\n  .ds-radio {\n    @apply h-4 w-4 border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-900 accent-ch-blue;\n    @apply focus:outline-none focus:ring-2 focus:ring-ch-blue/35 focus:ring-offset-0;\n  }\n\n  /* Brain chat markdown output styling */\n  .prose-brain h1 { @apply text-lg font-bold mt-4 mb-2; }\n  .prose-brain h2 { @apply text-base font-bold mt-3 mb-1.5; }\n  .prose-brain h3 { @apply text-sm font-semibold mt-3 mb-1; }\n  .prose-brain h4, .prose-brain h5, .prose-brain h6 { @apply text-sm font-semibold mt-2 mb-1; }\n  .prose-brain p { @apply my-1.5 leading-relaxed; }\n  .prose-brain ul { @apply list-disc pl-5 my-1.5 space-y-0.5; }\n  .prose-brain ol { @apply list-decimal pl-5 my-1.5 space-y-0.5; }\n  .prose-brain li { @apply leading-relaxed; }\n  .prose-brain a { @apply text-ch-blue hover:underline; }\n  .prose-brain blockquote { @apply border-l-2 border-gray-300 dark:border-gray-600 pl-3 my-2 text-gray-600 dark:text-gray-400 italic; }\n  .prose-brain pre { @apply bg-gray-100 dark:bg-gray-800 rounded-lg p-3 my-2 overflow-x-auto text-xs font-mono; }\n  .prose-brain code { @apply bg-gray-200 dark:bg-gray-700 px-1 py-0.5 rounded text-xs font-mono; }\n  .prose-brain pre code { @apply bg-transparent p-0; }\n  .prose-brain table { @apply min-w-full text-xs 
my-2 border border-gray-200 dark:border-gray-700 rounded; }\n  .prose-brain th { @apply px-2 py-1 text-left bg-gray-100 dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700 font-semibold; }\n  .prose-brain td { @apply px-2 py-1 border-b border-gray-200/70 dark:border-gray-700/70; }\n  .prose-brain hr { @apply my-3 border-gray-200 dark:border-gray-700; }\n  .prose-brain strong { @apply font-semibold; }\n  .prose-brain em { @apply italic; }\n}\n"
  },
  {
    "path": "ui/src/lib/api/alerts.ts",
    "content": "import { apiDel, apiGet, apiPost, apiPut } from './client'\nimport type { AlertChannel, AlertEvent, AlertRule } from '../types/alerts'\n\nconst BASE = '/api/governance/alerts'\n\nexport type AlertRuleRoutePayload = {\n  channel_id: string\n  recipients: string[]\n  is_active: boolean\n  delivery_mode?: 'immediate' | 'digest'\n  digest_window_minutes?: number\n  escalation_channel_id?: string\n  escalation_recipients?: string[]\n  escalation_after_failures?: number\n}\n\nexport async function adminListAlertChannels(): Promise<AlertChannel[]> {\n  const res = await apiGet<{ channels: AlertChannel[] }>(`${BASE}/channels`)\n  return res.channels ?? []\n}\n\nexport async function adminCreateAlertChannel(payload: {\n  name: string\n  channel_type: 'smtp' | 'resend' | 'brevo'\n  is_active: boolean\n  config: Record<string, unknown>\n}): Promise<void> {\n  await apiPost(`${BASE}/channels`, payload)\n}\n\nexport async function adminUpdateAlertChannel(id: string, payload: {\n  name?: string\n  channel_type?: 'smtp' | 'resend' | 'brevo'\n  is_active?: boolean\n  config?: Record<string, unknown>\n}): Promise<void> {\n  await apiPut(`${BASE}/channels/${encodeURIComponent(id)}`, payload)\n}\n\nexport async function adminDeleteAlertChannel(id: string): Promise<void> {\n  await apiDel(`${BASE}/channels/${encodeURIComponent(id)}`)\n}\n\nexport async function adminTestAlertChannel(id: string, payload: {\n  recipients: string[]\n  subject?: string\n  message?: string\n}): Promise<{ provider_message_id?: string }> {\n  return apiPost<{ provider_message_id?: string }>(`${BASE}/channels/${encodeURIComponent(id)}/test`, payload)\n}\n\nexport async function adminListAlertRules(): Promise<AlertRule[]> {\n  const res = await apiGet<{ rules: AlertRule[] }>(`${BASE}/rules`)\n  return res.rules ?? 
[]\n}\n\nexport async function adminCreateAlertRule(payload: {\n  name: string\n  event_type: string\n  severity_min: string\n  enabled: boolean\n  cooldown_seconds: number\n  max_attempts: number\n  subject_template?: string\n  body_template?: string\n  routes: AlertRuleRoutePayload[]\n}): Promise<void> {\n  await apiPost(`${BASE}/rules`, payload)\n}\n\nexport async function adminUpdateAlertRule(id: string, payload: {\n  name?: string\n  event_type?: string\n  severity_min?: string\n  enabled?: boolean\n  cooldown_seconds?: number\n  max_attempts?: number\n  subject_template?: string\n  body_template?: string\n  routes?: AlertRuleRoutePayload[]\n}): Promise<void> {\n  await apiPut(`${BASE}/rules/${encodeURIComponent(id)}`, payload)\n}\n\nexport async function adminDeleteAlertRule(id: string): Promise<void> {\n  await apiDel(`${BASE}/rules/${encodeURIComponent(id)}`)\n}\n\nexport async function adminListAlertEvents(params: { limit?: number; eventType?: string; status?: string } = {}): Promise<AlertEvent[]> {\n  const q = new URLSearchParams()\n  if (params.limit) q.set('limit', String(params.limit))\n  if (params.eventType) q.set('event_type', params.eventType)\n  if (params.status) q.set('status', params.status)\n  const url = `${BASE}/events${q.toString() ? `?${q.toString()}` : ''}`\n  const res = await apiGet<{ events: AlertEvent[] }>(url)\n  return res.events ?? []\n}\n"
  },
  {
    "path": "ui/src/lib/api/auth.ts",
    "content": "import { apiGet, apiPost } from './client'\nimport type { Session, Connection } from '../types/api'\n\ninterface LoginParams {\n  connectionId: string\n  username: string\n  password: string\n}\n\ninterface LoginResponse {\n  success: boolean\n  session: Session\n}\n\ninterface ConnectionsResponse {\n  success: boolean\n  connections: Connection[]\n}\n\ninterface SessionResponse {\n  success: boolean\n  session: Session\n}\n\n/** Log in to a ClickHouse connection */\nexport function login(params: LoginParams): Promise<LoginResponse> {\n  return apiPost<LoginResponse>('/api/auth/login', params)\n}\n\n/** Log out and destroy the session */\nexport function logout(): Promise<void> {\n  return apiPost('/api/auth/logout')\n}\n\n/** Check if a valid session exists */\nexport async function checkSession(): Promise<Session | null> {\n  try {\n    const res = await apiGet<{ authenticated: boolean; session?: Session }>('/api/auth/session')\n    if (!res.authenticated) return null\n    return res.session ?? null\n  } catch {\n    return null\n  }\n}\n\n/** List all connections (with online/offline status) */\nexport async function listConnections(): Promise<Connection[]> {\n  const res = await apiGet<ConnectionsResponse>('/api/auth/connections')\n  return res.connections ?? []\n}\n"
  },
  {
    "path": "ui/src/lib/api/brain.ts",
    "content": "import { withBase } from '../basePath'\nimport { apiDel, apiGet, apiPost, apiPut } from './client'\nimport type {\n  BrainArtifact,\n  BrainChat,\n  BrainMessage,\n  BrainModelOption,\n  BrainProviderAdmin,\n  BrainSkill,\n} from '../types/brain'\n\nexport async function listBrainModels(): Promise<BrainModelOption[]> {\n  const res = await apiGet<{ success: boolean; models: BrainModelOption[] }>('/api/brain/models')\n  return res.models ?? []\n}\n\nexport async function listBrainChats(includeArchived = false): Promise<BrainChat[]> {\n  const res = await apiGet<{ success: boolean; chats: BrainChat[] }>(`/api/brain/chats?includeArchived=${includeArchived}`)\n  return res.chats ?? []\n}\n\nexport async function createBrainChat(payload: { title?: string; modelId?: string }): Promise<BrainChat> {\n  const res = await apiPost<{ success: boolean; chat: BrainChat }>('/api/brain/chats', payload)\n  return res.chat\n}\n\nexport async function updateBrainChat(chatId: string, payload: { title?: string; archived?: boolean; modelId?: string; contextDatabase?: string; contextTable?: string; contextTables?: string }): Promise<void> {\n  await apiPut(`/api/brain/chats/${encodeURIComponent(chatId)}`, payload)\n}\n\nexport async function deleteBrainChat(chatId: string): Promise<void> {\n  await apiDel(`/api/brain/chats/${encodeURIComponent(chatId)}`)\n}\n\nexport async function listBrainMessages(chatId: string): Promise<BrainMessage[]> {\n  const res = await apiGet<{ success: boolean; messages: BrainMessage[] }>(`/api/brain/chats/${encodeURIComponent(chatId)}/messages`)\n  return res.messages ?? []\n}\n\nexport async function listBrainArtifacts(chatId: string): Promise<BrainArtifact[]> {\n  const res = await apiGet<{ success: boolean; artifacts: BrainArtifact[] }>(`/api/brain/chats/${encodeURIComponent(chatId)}/artifacts`)\n  return res.artifacts ?? 
[]\n}\n\nexport async function runBrainQueryArtifact(chatId: string, payload: { query: string; title?: string; messageId?: string; timeout?: number }): Promise<any> {\n  return apiPost(`/api/brain/chats/${encodeURIComponent(chatId)}/artifacts/query`, payload)\n}\n\nexport interface StreamEvent {\n  type: 'delta' | 'done' | 'error'\n  delta?: string\n  error?: string\n  messageId?: string\n  chatId?: string\n}\n\nexport async function streamBrainMessage(\n  chatId: string,\n  payload: { content: string; modelId?: string; schemaContext?: any; schemaContexts?: any[] },\n  onEvent: (event: StreamEvent) => void,\n): Promise<void> {\n  const response = await fetch(withBase(`/api/brain/chats/${encodeURIComponent(chatId)}/messages/stream`), {\n    method: 'POST',\n    credentials: 'include',\n    headers: { 'Content-Type': 'application/json' },\n    body: JSON.stringify(payload),\n  })\n\n  if (!response.ok) {\n    const body = await response.json().catch(() => ({}))\n    throw new Error(body.error ?? `Request failed (${response.status})`)\n  }\n\n  const reader = response.body?.getReader()\n  if (!reader) throw new Error('No response body')\n\n  const decoder = new TextDecoder()\n  let buffer = ''\n\n  while (true) {\n    const { done, value } = await reader.read()\n    if (done) break\n\n    buffer += decoder.decode(value, { stream: true })\n    const lines = buffer.split('\\n')\n    buffer = lines.pop() ?? 
''\n\n    for (const line of lines) {\n      if (!line.startsWith('data: ')) continue\n      const raw = line.slice(6)\n      if (!raw) continue\n      try {\n        const parsed = JSON.parse(raw) as StreamEvent\n        onEvent(parsed)\n      } catch {\n        // ignore malformed chunks\n      }\n    }\n  }\n}\n\n// -------- Admin endpoints --------\n\nexport async function adminListBrainProviders(): Promise<BrainProviderAdmin[]> {\n  const res = await apiGet<{ success: boolean; providers: BrainProviderAdmin[] }>('/api/admin/brain/providers')\n  return res.providers ?? []\n}\n\nexport async function adminCreateBrainProvider(payload: {\n  name: string\n  kind: string\n  baseUrl?: string\n  apiKey?: string\n  isActive?: boolean\n  isDefault?: boolean\n}): Promise<void> {\n  await apiPost('/api/admin/brain/providers', payload)\n}\n\nexport async function adminUpdateBrainProvider(id: string, payload: {\n  name?: string\n  kind?: string\n  baseUrl?: string\n  apiKey?: string\n  isActive?: boolean\n  isDefault?: boolean\n}): Promise<void> {\n  await apiPut(`/api/admin/brain/providers/${encodeURIComponent(id)}`, payload)\n}\n\nexport async function adminDeleteBrainProvider(id: string): Promise<void> {\n  await apiDel(`/api/admin/brain/providers/${encodeURIComponent(id)}`)\n}\n\nexport async function adminSyncBrainProviderModels(id: string): Promise<void> {\n  await apiPost(`/api/admin/brain/providers/${encodeURIComponent(id)}/sync-models`)\n}\n\nexport async function adminListBrainModels(): Promise<BrainModelOption[]> {\n  const res = await apiGet<{ success: boolean; models: BrainModelOption[] }>('/api/admin/brain/models')\n  return res.models ?? 
[]\n}\n\nexport async function adminUpdateBrainModel(id: string, payload: {\n  displayName?: string\n  isActive?: boolean\n  isDefault?: boolean\n}): Promise<void> {\n  await apiPut(`/api/admin/brain/models/${encodeURIComponent(id)}`, payload)\n}\n\nexport async function adminBulkUpdateBrainModels(payload: {\n  providerId: string\n  action: 'deactivate_all' | 'activate_all' | 'activate_recommended'\n}): Promise<void> {\n  await apiPost('/api/admin/brain/models/bulk', payload)\n}\n\nexport async function adminListBrainSkills(): Promise<BrainSkill[]> {\n  const res = await apiGet<{ success: boolean; skills: BrainSkill[] }>('/api/admin/brain/skills')\n  return res.skills ?? []\n}\n\nexport async function adminCreateBrainSkill(payload: {\n  name: string\n  content: string\n  isActive?: boolean\n  isDefault?: boolean\n}): Promise<void> {\n  await apiPost('/api/admin/brain/skills', payload)\n}\n\nexport async function adminUpdateBrainSkill(id: string, payload: {\n  name?: string\n  content?: string\n  isActive?: boolean\n  isDefault?: boolean\n}): Promise<void> {\n  await apiPut(`/api/admin/brain/skills/${encodeURIComponent(id)}`, payload)\n}\n"
  },
  {
    "path": "ui/src/lib/api/client.ts",
    "content": "import { withBase } from '../basePath'\nimport { safeParse } from '../utils/safe-json'\n\n/** Base fetch wrapper with credentials and error handling */\nasync function parseResponseBody(res: Response): Promise<any> {\n  const contentType = (res.headers.get('content-type') || '').toLowerCase()\n  if (contentType.includes('application/json')) {\n    try {\n      const text = await res.text()\n      return safeParse(text)\n    } catch {\n      return null\n    }\n  }\n\n  const text = await res.text().catch(() => '')\n  if (!text) return null\n  return { message: text }\n}\n\nfunction buildErrorMessage(status: number, body: any): string {\n  const raw = body && typeof body === 'object'\n    ? String(body.message || body.error || '').trim()\n    : ''\n  if (status === 429) {\n    const retryAfter = body && typeof body === 'object' ? Number(body.retryAfter || body.retry_after || 0) : 0\n    if (raw && retryAfter > 0) return `${raw} (retry in ${retryAfter}s)`\n    if (raw) return raw\n    return 'Too many requests'\n  }\n  if (raw) return raw\n  return `Request failed (${status})`\n}\n\nasync function request<T = unknown>(\n  url: string,\n  options: RequestInit = {},\n): Promise<T> {\n  const isFormDataBody =\n    typeof FormData !== 'undefined' && options.body instanceof FormData\n\n  const res = await fetch(withBase(url), {\n    credentials: 'include',\n    headers: {\n      ...(isFormDataBody ? 
{} : { 'Content-Type': 'application/json' }),\n      ...options.headers,\n    },\n    ...options,\n  })\n\n  const body = await parseResponseBody(res)\n  const isAuthEndpoint = url.startsWith('/api/auth/')\n\n  if (res.status === 401 && !isAuthEndpoint) {\n    // Session expired — redirect to login\n    window.location.href = withBase('/login')\n    throw new Error('Session expired')\n  }\n\n  if (res.status === 402) {\n    throw new Error(buildErrorMessage(res.status, body) || 'Pro license required')\n  }\n\n  if (!res.ok || (body && body.success === false)) {\n    throw new Error(buildErrorMessage(res.status, body))\n  }\n\n  return body as T\n}\n\nexport function apiGet<T = unknown>(url: string): Promise<T> {\n  return request<T>(url)\n}\n\nexport function apiPost<T = unknown>(url: string, data?: unknown): Promise<T> {\n  return request<T>(url, {\n    method: 'POST',\n    body: data != null ? JSON.stringify(data) : undefined,\n  })\n}\n\nexport function apiPostForm<T = unknown>(url: string, data: FormData): Promise<T> {\n  return request<T>(url, {\n    method: 'POST',\n    body: data,\n  })\n}\n\nexport function apiPut<T = unknown>(url: string, data?: unknown): Promise<T> {\n  return request<T>(url, {\n    method: 'PUT',\n    body: data != null ? JSON.stringify(data) : undefined,\n  })\n}\n\nexport function apiDel<T = unknown>(url: string): Promise<T> {\n  return request<T>(url, { method: 'DELETE' })\n}\n"
  },
  {
    "path": "ui/src/lib/api/governance.ts",
    "content": "import { apiGet, apiPost, apiPut, apiDel } from './client'\nimport type {\n  GovernanceOverview,\n  GovernanceSettings,\n  SyncResult,\n  SyncState,\n  GovDatabase,\n  GovTable,\n  GovColumn,\n  SchemaChange,\n  QueryLogEntry,\n  TopQuery,\n  LineageGraph,\n  TagEntry,\n  ChUser,\n  ChRole,\n  AccessMatrixEntry,\n  OverPermission,\n  Policy,\n  PolicyViolation,\n  GovernanceObjectComment,\n  GovernanceIncident,\n  GovernanceIncidentComment,\n} from '../types/governance'\n\nconst BASE = '/api/governance'\n\n// ── Overview / Sync ─────────────────────────────────────────────\n\nexport function fetchOverview() {\n  return apiGet<{ overview?: GovernanceOverview } | GovernanceOverview>(`${BASE}/overview`)\n    .then((res: any) => res?.overview ?? res)\n}\n\nexport function triggerSync() {\n  return apiPost<SyncResult>(`${BASE}/sync`)\n}\n\nexport function triggerSingleSync(type: 'metadata' | 'query_log' | 'access') {\n  return apiPost<SyncResult>(`${BASE}/sync/${type}`)\n}\n\nexport function fetchSyncStatus() {\n  return apiGet<{ sync_states: SyncState[] }>(`${BASE}/sync/status`)\n}\n\n// ── Settings (admin) ────────────────────────────────────────────\n\nexport function fetchGovernanceSettings() {\n  return apiGet<GovernanceSettings>('/api/admin/governance/settings')\n}\n\nexport function updateGovernanceSettings(payload: {\n  sync_enabled?: boolean\n  banner_dismissed?: boolean\n}) {\n  return apiPut<GovernanceSettings>('/api/admin/governance/settings', payload)\n}\n\n// ── Metadata ────────────────────────────────────────────────────\n\nexport function fetchDatabases() {\n  return apiGet<{ databases: GovDatabase[] }>(`${BASE}/databases`)\n}\n\nexport function fetchTables(params?: { database?: string; tag?: string; search?: string }) {\n  const qs = new URLSearchParams()\n  if (params?.database) qs.set('database', params.database)\n  if (params?.tag) qs.set('tag', params.tag)\n  if (params?.search) qs.set('search', params.search)\n  const q = 
qs.toString()\n  return apiGet<{ tables: GovTable[] }>(`${BASE}/tables${q ? '?' + q : ''}`)\n}\n\nexport function fetchTableDetail(database: string, table: string) {\n  return apiGet<{\n    table: GovTable\n    columns: GovColumn[]\n    tags: TagEntry[]\n    recent_queries?: QueryLogEntry[]\n    queries?: QueryLogEntry[]\n    incoming: any[]\n    outgoing: any[]\n  }>(`${BASE}/tables/${encodeURIComponent(database)}/${encodeURIComponent(table)}`)\n    .then((res: any) => ({\n      ...res,\n      recent_queries: res?.recent_queries ?? res?.queries ?? [],\n    }))\n}\n\nexport function fetchTableNotes(database: string, table: string) {\n  return apiGet<{ notes: GovernanceObjectComment[] }>(`${BASE}/tables/${encodeURIComponent(database)}/${encodeURIComponent(table)}/notes`)\n}\n\nexport function fetchColumnNotes(database: string, table: string, column: string) {\n  return apiGet<{ notes: GovernanceObjectComment[] }>(`${BASE}/tables/${encodeURIComponent(database)}/${encodeURIComponent(table)}/columns/${encodeURIComponent(column)}/notes`)\n}\n\nexport function createTableNote(database: string, table: string, commentText: string) {\n  return apiPost<{ id: string }>(`${BASE}/tables/${encodeURIComponent(database)}/${encodeURIComponent(table)}/notes`, { comment_text: commentText })\n}\n\nexport function createColumnNote(database: string, table: string, column: string, commentText: string) {\n  return apiPost<{ id: string }>(`${BASE}/tables/${encodeURIComponent(database)}/${encodeURIComponent(table)}/columns/${encodeURIComponent(column)}/notes`, { comment_text: commentText })\n}\n\nexport function deleteObjectNote(id: string) {\n  return apiDel(`${BASE}/notes/${encodeURIComponent(id)}`)\n}\n\nexport function fetchSchemaChanges(limit = 50) {\n  return apiGet<{ changes: SchemaChange[] }>(`${BASE}/schema-changes?limit=${limit}`)\n}\n\n// ── Query Log ───────────────────────────────────────────────────\n\nexport function fetchQueryLog(params?: { user?: string; table?: string; 
limit?: number; offset?: number }) {\n  const qs = new URLSearchParams()\n  if (params?.user) qs.set('user', params.user)\n  if (params?.table) qs.set('table', params.table)\n  if (params?.limit) qs.set('limit', String(params.limit))\n  if (params?.offset) qs.set('offset', String(params.offset))\n  const q = qs.toString()\n  return apiGet<{ entries: QueryLogEntry[]; total: number }>(`${BASE}/query-log${q ? '?' + q : ''}`)\n}\n\nexport function fetchTopQueries(limit = 20) {\n  return apiGet<{ queries?: TopQuery[]; top_queries?: any[] }>(`${BASE}/query-log/top?limit=${limit}`)\n    .then((res: any) => {\n      const normalized = (res?.queries ?? res?.top_queries ?? []).map((q: any) => ({\n        normalized_hash: q?.normalized_hash ?? '',\n        count: Number(q?.count ?? q?.execution_count ?? 0),\n        avg_duration_ms: Number(q?.avg_duration_ms ?? q?.avg_duration ?? 0),\n        total_read_rows: Number(q?.total_read_rows ?? 0),\n        sample_query: q?.sample_query ?? q?.normalized_query ?? '',\n        last_seen: q?.last_seen ?? '',\n      })) as TopQuery[]\n      return { queries: normalized }\n    })\n}\n\n// ── Lineage ─────────────────────────────────────────────────────\n\nexport function fetchLineage(database: string, table: string) {\n  return apiGet<{ graph?: LineageGraph } | LineageGraph>(`${BASE}/lineage?database=${encodeURIComponent(database)}&table=${encodeURIComponent(table)}`)\n    .then((res: any) => res?.graph ?? res)\n}\n\nexport function fetchLineageGraph(includeColumns = false) {\n  const qs = includeColumns ? '?include_columns=true' : ''\n  return apiGet<{ graph?: LineageGraph } | LineageGraph>(`${BASE}/lineage/graph${qs}`)\n    .then((res: any) => res?.graph ?? res)\n}\n\nexport function fetchViewGraph() {\n  return apiGet<{ graph?: LineageGraph } | LineageGraph>(`${BASE}/view-graph`)\n    .then((res: any) => res?.graph ?? 
res)\n}\n\nexport function fetchLineageWithColumns(database: string, table: string) {\n  return apiGet<{ graph?: LineageGraph } | LineageGraph>(\n    `${BASE}/lineage?database=${encodeURIComponent(database)}&table=${encodeURIComponent(table)}&include_columns=true`\n  ).then((res: any) => res?.graph ?? res)\n}\n\nexport function fetchQueryByQueryID(queryId: string) {\n  return apiGet<{ entry: QueryLogEntry }>(`${BASE}/query-log/${encodeURIComponent(queryId)}`)\n}\n\n// ── Tags ────────────────────────────────────────────────────────\n\nexport function fetchTags(params?: { database?: string; table?: string }) {\n  const qs = new URLSearchParams()\n  if (params?.database) qs.set('database', params.database)\n  if (params?.table) qs.set('table', params.table)\n  const q = qs.toString()\n  return apiGet<{ tags: TagEntry[] }>(`${BASE}/tags${q ? '?' + q : ''}`)\n}\n\nexport function createTag(data: { object_type: string; database_name: string; table_name: string; column_name?: string; tag: string }) {\n  return apiPost<{ id: string }>(`${BASE}/tags`, data)\n}\n\nexport function deleteTag(id: string) {\n  return apiDel(`${BASE}/tags/${id}`)\n}\n\n// ── Access ──────────────────────────────────────────────────────\n\nexport function fetchAccessUsers() {\n  return apiGet<{ users: ChUser[] }>(`${BASE}/access/users`)\n}\n\nexport function fetchAccessRoles() {\n  return apiGet<{ roles: ChRole[] }>(`${BASE}/access/roles`)\n}\n\nexport function fetchAccessMatrix(user?: string) {\n  const q = user ? 
`?user=${encodeURIComponent(user)}` : ''\n  return apiGet<{ matrix: AccessMatrixEntry[] }>(`${BASE}/access/matrix${q}`)\n}\n\nexport function fetchOverPermissions(days = 30) {\n  return apiGet<{ over_permissions: OverPermission[] }>(`${BASE}/access/over-permissions?days=${days}`)\n}\n\n// ── Policies ────────────────────────────────────────────────────\n\nexport function fetchPolicies() {\n  return apiGet<{ policies: Policy[] }>(`${BASE}/policies`)\n}\n\nexport function createPolicy(data: Partial<Policy>) {\n  return apiPost<{ id: string }>(`${BASE}/policies`, data)\n}\n\nexport function getPolicy(id: string) {\n  return apiGet<Policy>(`${BASE}/policies/${id}`)\n}\n\nexport function updatePolicy(id: string, data: Partial<Policy>) {\n  return apiPut(`${BASE}/policies/${id}`, data)\n}\n\nexport function deletePolicy(id: string) {\n  return apiDel(`${BASE}/policies/${id}`)\n}\n\n// ── Violations ──────────────────────────────────────────────────\n\nexport function fetchViolations(params?: { policy_id?: string; limit?: number }) {\n  const qs = new URLSearchParams()\n  if (params?.policy_id) qs.set('policy_id', params.policy_id)\n  if (params?.limit) qs.set('limit', String(params.limit))\n  const q = qs.toString()\n  return apiGet<{ violations: PolicyViolation[] }>(`${BASE}/violations${q ? '?' + q : ''}`)\n}\n\nexport function promoteViolationToIncident(id: string) {\n  return apiPost<{ incident_id: string; created: boolean }>(`${BASE}/violations/${encodeURIComponent(id)}/incident`)\n}\n\nexport function fetchIncidents(params?: { status?: string; severity?: string; limit?: number }) {\n  const qs = new URLSearchParams()\n  if (params?.status) qs.set('status', params.status)\n  if (params?.severity) qs.set('severity', params.severity)\n  if (params?.limit) qs.set('limit', String(params.limit))\n  const q = qs.toString()\n  return apiGet<{ incidents: GovernanceIncident[] }>(`${BASE}/incidents${q ? '?' 
+ q : ''}`)\n}\n\nexport function getIncident(id: string) {\n  return apiGet<{ incident: GovernanceIncident }>(`${BASE}/incidents/${encodeURIComponent(id)}`)\n}\n\nexport function createIncident(data: {\n  source_type?: string\n  source_ref?: string\n  title: string\n  severity?: string\n  status?: string\n  assignee?: string\n  details?: string\n}) {\n  return apiPost<{ id: string }>(`${BASE}/incidents`, data)\n}\n\nexport function updateIncident(id: string, data: {\n  title?: string\n  severity?: string\n  status?: string\n  assignee?: string\n  details?: string\n  resolution_note?: string\n}) {\n  return apiPut(`${BASE}/incidents/${encodeURIComponent(id)}`, data)\n}\n\nexport function fetchIncidentComments(id: string) {\n  return apiGet<{ comments: GovernanceIncidentComment[] }>(`${BASE}/incidents/${encodeURIComponent(id)}/comments`)\n}\n\nexport function createIncidentComment(id: string, commentText: string) {\n  return apiPost<{ id: string }>(`${BASE}/incidents/${encodeURIComponent(id)}/comments`, { comment_text: commentText })\n}\n"
  },
  {
    "path": "ui/src/lib/api/models.ts",
    "content": "import { apiGet, apiPost, apiPut, apiDel } from './client'\nimport type { Model, ModelRun, ModelRunResult, ModelDAG, ValidationResult, ModelSchedule, Pipeline } from '../types/models'\n\nconst BASE = '/api/models'\n\nexport function listModels() {\n  return apiGet<{ models: Model[] }>(BASE)\n}\n\nexport function createModel(data: {\n  name: string\n  description?: string\n  target_database: string\n  materialization: string\n  sql_body: string\n  table_engine?: string\n  order_by?: string\n}) {\n  return apiPost<{ model: Model }>(BASE, data)\n}\n\nexport function getModel(id: string) {\n  return apiGet<{ model: Model }>(`${BASE}/${id}`)\n}\n\nexport function updateModel(id: string, data: Partial<Omit<Model, 'id' | 'connection_id' | 'created_at' | 'updated_at'>>) {\n  return apiPut<{ model: Model }>(`${BASE}/${id}`, data)\n}\n\nexport function deleteModel(id: string) {\n  return apiDel(`${BASE}/${id}`)\n}\n\nexport function getDAG() {\n  return apiGet<ModelDAG>(`${BASE}/dag`)\n}\n\nexport function validateModels() {\n  return apiGet<ValidationResult>(`${BASE}/validate`)\n}\n\nexport function runAllModels() {\n  return apiPost<{ run_id: string }>(`${BASE}/run`)\n}\n\nexport function runSingleModel(id: string) {\n  return apiPost<{ run_id: string }>(`${BASE}/${id}/run`)\n}\n\nexport function listModelRuns(limit = 20, offset = 0) {\n  return apiGet<{ runs: ModelRun[] }>(`${BASE}/runs?limit=${limit}&offset=${offset}`)\n}\n\nexport function getModelRun(runId: string) {\n  return apiGet<{ run: ModelRun; results: ModelRunResult[] }>(`${BASE}/runs/${runId}`)\n}\n\nexport function listPipelines() {\n  return apiGet<{ pipelines: Pipeline[] }>(`${BASE}/pipelines`)\n}\n\nexport function runPipeline(anchorId: string) {\n  return apiPost<{ run_id: string }>(`${BASE}/pipelines/${anchorId}/run`)\n}\n\nexport function getPipelineSchedule(anchorId: string) {\n  return apiGet<{ schedule: ModelSchedule | null }>(`${BASE}/schedule/${anchorId}`)\n}\n\nexport function 
upsertPipelineSchedule(anchorId: string, data: { cron: string; enabled: boolean }) {\n  return apiPut<{ schedule: ModelSchedule }>(`${BASE}/schedule/${anchorId}`, data)\n}\n\nexport function deletePipelineSchedule(anchorId: string) {\n  return apiDel(`${BASE}/schedule/${anchorId}`)\n}\n"
  },
  {
    "path": "ui/src/lib/api/pipelines.ts",
    "content": "import { apiGet, apiPost, apiPut, apiDel } from './client'\nimport type { Pipeline, PipelineGraph, PipelineRun, PipelineRunLog } from '../types/pipelines'\n\nconst BASE = '/api/pipelines'\n\nexport function listPipelines() {\n  return apiGet<{ pipelines: Pipeline[] }>(BASE)\n}\n\nexport function createPipeline(data: { name: string; description?: string; connection_id?: string }) {\n  return apiPost<{ pipeline: Pipeline }>(BASE, data)\n}\n\nexport function getPipeline(id: string) {\n  return apiGet<{ pipeline: Pipeline; graph: PipelineGraph }>(`${BASE}/${id}`)\n}\n\nexport function updatePipeline(id: string, data: { name: string; description?: string }) {\n  return apiPut<{ pipeline: Pipeline }>(`${BASE}/${id}`, data)\n}\n\nexport function deletePipeline(id: string) {\n  return apiDel(`${BASE}/${id}`)\n}\n\nexport function saveGraph(id: string, graph: {\n  nodes: { id: string; node_type: string; label: string; position_x: number; position_y: number; config: Record<string, unknown> }[]\n  edges: { id: string; source_node_id: string; target_node_id: string; source_handle?: string; target_handle?: string }[]\n  viewport?: { x: number; y: number; zoom: number }\n}) {\n  return apiPut<{ success: string }>(`${BASE}/${id}/graph`, graph)\n}\n\nexport function startPipeline(id: string) {\n  return apiPost<{ success: string }>(`${BASE}/${id}/start`)\n}\n\nexport function stopPipeline(id: string) {\n  return apiPost<{ success: string }>(`${BASE}/${id}/stop`)\n}\n\nexport function getPipelineStatus(id: string) {\n  return apiGet<{\n    pipeline_id: string\n    status: string\n    last_error: string | null\n    rows_ingested?: number\n    bytes_ingested?: number\n    batches_sent?: number\n    errors_count?: number\n  }>(`${BASE}/${id}/status`)\n}\n\nexport function listRuns(id: string, limit = 20, offset = 0) {\n  return apiGet<{ runs: PipelineRun[] }>(`${BASE}/${id}/runs?limit=${limit}&offset=${offset}`)\n}\n\nexport function getRunLogs(id: string, runId: 
string, limit = 200) {\n  return apiGet<{ logs: PipelineRunLog[] }>(`${BASE}/${id}/runs/${runId}/logs?limit=${limit}`)\n}\n"
  },
  {
    "path": "ui/src/lib/api/query.ts",
    "content": "import { apiGet, apiPost } from './client'\nimport type {\n  LegacyQueryResult,\n  ExplorerDataResponse,\n  QueryPlanResult,\n  QueryProfileResult,\n  QueryEstimateResult,\n  SampleQueryResult,\n} from '../types/query'\nimport type { Column } from '../types/schema'\n\ninterface RunQueryParams {\n  query: string\n  timeout?: number\n}\n\nfunction escapeLiteral(value: string): string {\n  // Reject null bytes which can truncate strings in some SQL engines\n  if (value.includes('\\0')) throw new Error('Invalid character in identifier')\n  return value.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, \"\\\\'\")\n}\n\nfunction escapeIdentifier(value: string): string {\n  if (value.includes('\\0')) throw new Error('Invalid character in identifier')\n  return '`' + value.replace(/`/g, '``') + '`'\n}\n\n/** Execute a query (legacy JSON format) */\nexport function runQuery(params: RunQueryParams): Promise<LegacyQueryResult> {\n  return apiPost<LegacyQueryResult>('/api/query/run', params)\n}\n\n/** Format a SQL query */\nexport async function formatSQL(query: string): Promise<string> {\n  const res = await apiPost<{ formatted: string }>('/api/query/format', { query })\n  return res.formatted\n}\n\n/** Get EXPLAIN output for a query */\nexport function explainQuery(query: string): Promise<LegacyQueryResult> {\n  return apiPost<LegacyQueryResult>('/api/query/explain', { query })\n}\n\n/** Get parsed query plan (tree + raw lines) */\nexport function fetchQueryPlan(query: string): Promise<QueryPlanResult> {\n  return apiPost<QueryPlanResult>('/api/query/plan', { query })\n}\n\n/** Get inline profiling row from system.query_log for a query */\nexport function fetchQueryProfile(query: string): Promise<QueryProfileResult> {\n  return apiPost<QueryProfileResult>('/api/query/profile', { query })\n}\n\n/** Get query cost estimate via EXPLAIN ESTIMATE */\nexport function estimateQuery(query: string): Promise<QueryEstimateResult> {\n  return 
apiPost<QueryEstimateResult>('/api/query/estimate', { query })\n}\n\n/** Execute sampling query: first N rows per shard with fallback to global sample */\nexport function runSampleQuery(params: {\n  query: string\n  per_shard?: number\n  shard_by?: string\n  timeout?: number\n}): Promise<SampleQueryResult> {\n  return apiPost<SampleQueryResult>('/api/query/sample', {\n    query: params.query,\n    per_shard: params.per_shard ?? 25,\n    shard_by: params.shard_by ?? '_shard_num',\n    timeout: params.timeout ?? 45,\n  })\n}\n\n/** Fetch paginated explorer data (JSONCompact format) */\nexport function fetchExplorerData(params: {\n  database: string\n  table: string\n  page?: number\n  page_size?: number\n  sort_column?: string\n  sort_dir?: string\n}): Promise<ExplorerDataResponse> {\n  return apiPost<ExplorerDataResponse>('/api/query/explorer-data', {\n    database: params.database,\n    table: params.table,\n    page: params.page ?? 0,\n    page_size: params.page_size ?? 100,\n    sort_column: params.sort_column ?? '',\n    sort_dir: params.sort_dir ?? 'asc',\n  })\n}\n\n/** List databases */\nexport async function listDatabases(): Promise<string[]> {\n  const res = await apiGet<{ databases: string[] }>('/api/query/databases')\n  return res.databases ?? []\n}\n\n/** Fetch autocomplete data (functions + keywords) */\nexport async function fetchCompletions(): Promise<{ functions: string[]; keywords: string[] }> {\n  const res = await apiGet<{ functions: string[]; keywords: string[] }>('/api/query/completions')\n  return { functions: res.functions ?? [], keywords: res.keywords ?? [] }\n}\n\n/** List tables in a database */\nexport async function listTables(database: string): Promise<string[]> {\n  const res = await apiGet<{ tables: Array<{ name: string; engine: string }> }>(`/api/query/tables?database=${encodeURIComponent(database)}`)\n  return (res.tables ?? 
[]).map(t => t.name)\n}\n\n/** List columns for a table */\nexport async function listColumns(database: string, table: string): Promise<Column[]> {\n  const res = await apiGet<{ columns: Column[] }>(\n    `/api/query/columns?database=${encodeURIComponent(database)}&table=${encodeURIComponent(table)}`,\n  )\n  return res.columns ?? []\n}\n\n/** Fetch table metadata from system.tables */\nexport async function fetchTableInfo(database: string, table: string): Promise<Record<string, any>> {\n  const db = escapeLiteral(database)\n  const tbl = escapeLiteral(table)\n  const query = `SELECT database, name, engine, total_rows, total_bytes, lifetime_rows, lifetime_bytes, metadata_modification_time, create_table_query, partition_key, sorting_key, primary_key, sampling_key FROM system.tables WHERE database = '${db}' AND name = '${tbl}'`\n  const res = await runQuery({ query })\n  if (res.data?.length > 0) {\n    const row = res.data[0]\n    if (Array.isArray(row)) {\n      const obj: Record<string, any> = {}\n      res.meta.forEach((col: any, i: number) => { obj[col.name] = row[i] })\n      return obj\n    }\n    return row as Record<string, any>\n  }\n  return {}\n}\n\n/** Fetch table schema via DESCRIBE */\nexport async function fetchTableSchema(database: string, table: string): Promise<LegacyQueryResult> {\n  return runQuery({ query: `DESCRIBE TABLE ${escapeIdentifier(database)}.${escapeIdentifier(table)}` })\n}\n\n/** Fetch database metadata and aggregate stats */\nexport async function fetchDatabaseInfo(database: string): Promise<Record<string, any>> {\n  const db = escapeLiteral(database)\n  const query = `SELECT d.name, d.engine, d.data_path, d.metadata_path, count(t.name) AS table_count, sumOrNull(t.total_rows) AS total_rows, sumOrNull(t.total_bytes) AS total_bytes, maxOrNull(t.metadata_modification_time) AS last_modified FROM system.databases d LEFT JOIN system.tables t ON t.database = d.name WHERE d.name = '${db}' GROUP BY d.name, d.engine, d.data_path, 
d.metadata_path`\n  const res = await runQuery({ query })\n  if (res.data?.length > 0) {\n    const row = res.data[0]\n    if (Array.isArray(row)) {\n      const obj: Record<string, any> = {}\n      res.meta.forEach((col: any, i: number) => { obj[col.name] = row[i] })\n      return obj\n    }\n    return row as Record<string, any>\n  }\n  return {}\n}\n\n/** Fetch tables list and table-level stats for a database */\nexport async function fetchDatabaseTables(database: string): Promise<LegacyQueryResult> {\n  const db = escapeLiteral(database)\n  const query = `SELECT name, engine, total_rows, total_bytes, metadata_modification_time FROM system.tables WHERE database = '${db}' ORDER BY name`\n  return runQuery({ query })\n}\n"
  },
  {
    "path": "ui/src/lib/api/stream.ts",
    "content": "import { withBase } from '../basePath'\nimport type { ColumnMeta, QueryStats, StreamMessage } from '../types/query'\nimport { safeParse } from '../utils/safe-json'\n\n/** Execute a streaming query via NDJSON. Calls the provided callbacks as data arrives. */\nexport async function executeStreamQuery(\n  sql: string,\n  maxResultRows: number,\n  onMeta: (meta: ColumnMeta[]) => void,\n  onChunk: (rows: unknown[][], seq: number) => void,\n  onDone: (stats: QueryStats | undefined, totalRows: number) => void,\n  onError: (error: string) => void,\n  signal?: AbortSignal,\n): Promise<void> {\n  const res = await fetch(withBase('/api/query/stream'), {\n    method: 'POST',\n    headers: { 'Content-Type': 'application/json' },\n    body: JSON.stringify({ query: sql, maxResultRows }),\n    credentials: 'include',\n    signal,\n  })\n\n  if (!res.ok) {\n    const body = await res.json().catch(() => ({ error: `HTTP ${res.status}` }))\n    onError(body.error || `Request failed (${res.status})`)\n    return\n  }\n\n  const reader = res.body!.getReader()\n  const decoder = new TextDecoder()\n  let buf = ''\n\n  while (true) {\n    const { done, value } = await reader.read()\n    if (done) break\n\n    buf += decoder.decode(value, { stream: true })\n    const lines = buf.split('\\n')\n    buf = lines.pop()!\n\n    for (const line of lines) {\n      if (!line.trim()) continue\n      try {\n        const msg: StreamMessage = safeParse(line)\n        switch (msg.type) {\n          case 'meta':\n            onMeta(msg.meta)\n            break\n          case 'chunk':\n            onChunk(msg.data, msg.seq)\n            break\n          case 'done':\n            onDone(msg.statistics, msg.total_rows)\n            break\n          case 'error':\n            onError(msg.error)\n            break\n        }\n      } catch {\n        // Skip malformed lines\n      }\n    }\n  }\n\n  // Process remaining buffer\n  if (buf.trim()) {\n    try {\n      const msg: StreamMessage = 
safeParse(buf)\n      if (msg.type === 'done') onDone(msg.statistics, msg.total_rows)\n      else if (msg.type === 'error') onError(msg.error)\n    } catch {\n      // ignore\n    }\n  }\n}\n"
  },
  {
    "path": "ui/src/lib/basePath.ts",
    "content": "/**\n * Base path utility for subpath deployments.\n *\n * Supports deploying CH-UI behind a reverse proxy at a subpath.\n * Checks runtime window.env first (for Docker inject-env), falls back to\n * build-time import.meta.env.BASE_URL (set via VITE_BASE_PATH).\n */\n\ndeclare global {\n  interface Window {\n    env?: { VITE_BASE_PATH?: string }\n  }\n}\n\n/** Returns the base path without trailing slash (empty string for root). */\nexport function getBasePath(): string {\n  if (typeof window !== 'undefined' && window.env?.VITE_BASE_PATH) {\n    return window.env.VITE_BASE_PATH.replace(/\\/$/, '')\n  }\n  const base = import.meta.env.BASE_URL ?? '/'\n  return base.endsWith('/') ? base.slice(0, -1) : base\n}\n\n/** Prepends the base path to an absolute path (path should start with /). */\nexport function withBase(path: string): string {\n  const base = getBasePath()\n  return base ? base + path : path\n}\n\n/** Strips the base path prefix from a pathname. */\nexport function stripBase(path: string): string {\n  const base = getBasePath()\n  if (!base) return path\n  return path.startsWith(base) ? (path.slice(base.length) || '/') : path\n}\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainArtifactCard.svelte",
    "content": "<script lang=\"ts\">\n  import type { BrainArtifact } from '../../types/brain'\n  import type { PanelConfig } from '../../types/api'\n  import { isDateType, isNumericType } from '../../utils/chart-transform'\n  import ChartPanel from '../dashboard/ChartPanel.svelte'\n  import { ChevronRight, BarChart3, Table } from 'lucide-svelte'\n\n  interface Props {\n    artifact: BrainArtifact\n  }\n\n  let { artifact }: Props = $props()\n\n  let expanded = $state(false)\n  let viewMode = $state<'table' | 'chart'>('chart')\n\n  function parsePayload(): any | null {\n    if (artifact.type !== 'query_result') return null\n    try {\n      return JSON.parse(artifact.content)\n    } catch {\n      return null\n    }\n  }\n\n  function getRows(): Record<string, any>[] {\n    const payload = parsePayload()\n    if (!payload || !Array.isArray(payload.data)) return []\n    return payload.data\n  }\n\n  function getColumns(): string[] {\n    const payload = parsePayload()\n    if (payload?.meta && Array.isArray(payload.meta) && payload.meta.length > 0) {\n      return payload.meta.map((m: any) => String(m?.name ?? '')).filter(Boolean)\n    }\n    const rows = getRows()\n    if (rows.length === 0) return []\n    return Object.keys(rows[0])\n  }\n\n  const payload = $derived(parsePayload())\n  const rows = $derived(getRows())\n  const cols = $derived(getColumns())\n  const elapsed = $derived(payload?.statistics?.elapsed)\n\n  /** Auto-detect chart configuration from column types */\n  const chartConfig = $derived.by<PanelConfig | null>(() => {\n    if (!payload?.meta || rows.length < 2) return null\n    const meta = payload.meta as { name: string; type: string }[]\n    const dateCol = meta.find(m => isDateType(m.type))\n    const numericCols = meta.filter(m => isNumericType(m.type))\n    const xColumn = dateCol?.name ?? 
meta[0]?.name\n    const yColumns = numericCols.filter(m => m.name !== xColumn).map(m => m.name)\n    if (!xColumn || yColumns.length === 0) return null\n    return {\n      chartType: dateCol ? 'timeseries' : 'bar',\n      xColumn,\n      yColumns: yColumns.slice(0, 5),\n    }\n  })\n</script>\n\n<div class=\"mt-2 rounded-lg border border-gray-200 dark:border-gray-700 bg-gray-50/70 dark:bg-gray-900/50 overflow-hidden\">\n  <div class=\"flex items-center gap-2 px-3 py-2\">\n    <button\n      class=\"flex items-center gap-2 flex-1 text-left hover:bg-gray-100 dark:hover:bg-gray-800/50 -mx-1 px-1 rounded transition-colors\"\n      onclick={() => expanded = !expanded}\n    >\n      <ChevronRight size={14} class=\"text-gray-400 transition-transform {expanded ? 'rotate-90' : ''}\" />\n      <span class=\"text-xs font-semibold text-gray-800 dark:text-gray-200 truncate\">{artifact.title}</span>\n      {#if rows.length > 0}\n        <span class=\"ds-badge ds-badge-neutral\">{rows.length} rows</span>\n      {/if}\n      {#if elapsed}\n        <span class=\"text-[11px] text-gray-500\">{elapsed}s</span>\n      {/if}\n    </button>\n\n    {#if chartConfig && expanded}\n      <div class=\"flex items-center rounded-md border border-gray-200 dark:border-gray-700 overflow-hidden shrink-0\">\n        <button\n          class=\"p-1 transition-colors {viewMode === 'chart' ? 'bg-ch-blue/10 text-ch-blue' : 'text-gray-400 hover:text-gray-600 dark:hover:text-gray-300'}\"\n          onclick={() => viewMode = 'chart'}\n          title=\"Chart view\"\n        >\n          <BarChart3 size={13} />\n        </button>\n        <button\n          class=\"p-1 transition-colors border-l border-gray-200 dark:border-gray-700 {viewMode === 'table' ? 
'bg-ch-blue/10 text-ch-blue' : 'text-gray-400 hover:text-gray-600 dark:hover:text-gray-300'}\"\n          onclick={() => viewMode = 'table'}\n          title=\"Table view\"\n        >\n          <Table size={13} />\n        </button>\n      </div>\n    {/if}\n  </div>\n\n  {#if expanded}\n    {#if artifact.type === 'query_result' && payload}\n      {#if viewMode === 'chart' && chartConfig}\n        <div class=\"border-t border-gray-200 dark:border-gray-700 h-[220px]\">\n          <ChartPanel data={rows} meta={payload.meta} config={chartConfig} />\n        </div>\n      {:else if cols.length > 0}\n        <div class=\"border-t border-gray-200 dark:border-gray-700 max-h-[240px] overflow-auto\">\n          <table class=\"min-w-full text-[11px] font-mono\">\n            <thead class=\"bg-gray-100 dark:bg-gray-800 sticky top-0\">\n              <tr>\n                {#each cols as col}\n                  <th class=\"px-2 py-1 text-left text-gray-600 dark:text-gray-300 border-b border-gray-200 dark:border-gray-700 whitespace-nowrap\">{col}</th>\n                {/each}\n              </tr>\n            </thead>\n            <tbody>\n              {#each rows as row}\n                <tr class=\"odd:bg-white/70 even:bg-gray-50/70 dark:odd:bg-gray-900/30 dark:even:bg-gray-800/30\">\n                  {#each cols as col}\n                    <td class=\"px-2 py-1 border-b border-gray-200/70 dark:border-gray-700/70 align-top whitespace-nowrap\">{String(row[col] ?? 
'')}</td>\n                  {/each}\n                </tr>\n              {/each}\n            </tbody>\n          </table>\n        </div>\n      {/if}\n\n      {#if payload?.query}\n        <details class=\"border-t border-gray-200 dark:border-gray-700 px-3 py-2\">\n          <summary class=\"text-[11px] text-ch-blue cursor-pointer\">View query</summary>\n          <pre class=\"mt-1 text-[11px] whitespace-pre-wrap bg-gray-100 dark:bg-gray-800 rounded p-2 max-h-40 overflow-auto\">{payload.query}</pre>\n        </details>\n      {/if}\n\n      <details class=\"border-t border-gray-200 dark:border-gray-700 px-3 py-2\">\n        <summary class=\"text-[11px] text-ch-blue cursor-pointer\">View raw payload</summary>\n        <pre class=\"mt-1 text-[11px] whitespace-pre-wrap bg-gray-100 dark:bg-gray-800 rounded p-2 max-h-52 overflow-auto\">{artifact.content}</pre>\n      </details>\n    {:else}\n      <details class=\"border-t border-gray-200 dark:border-gray-700 px-3 py-2\">\n        <summary class=\"text-[11px] text-ch-blue cursor-pointer\">View payload</summary>\n        <pre class=\"mt-1 text-[11px] whitespace-pre-wrap bg-gray-100 dark:bg-gray-800 rounded p-2 max-h-52 overflow-auto\">{artifact.content}</pre>\n      </details>\n    {/if}\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainEmptyState.svelte",
    "content": "<script lang=\"ts\">\n  import { Brain } from 'lucide-svelte'\n</script>\n\n<div class=\"h-full flex items-center justify-center text-center text-gray-400\">\n  <div>\n    <Brain size={40} class=\"mx-auto mb-3 text-gray-300 dark:text-gray-700\" />\n    <p class=\"text-sm\">Start a chat to generate SQL and insights.</p>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainHeader.svelte",
    "content": "<script lang=\"ts\">\n  import type { BrainModelOption } from '../../types/brain'\n  import type { ComboboxOption } from '../common/Combobox.svelte'\n  import Combobox from '../common/Combobox.svelte'\n  import { Brain } from 'lucide-svelte'\n\n  interface Props {\n    models: BrainModelOption[]\n    selectedModelId: string\n    selectedDb: string\n    selectedTable: string\n    databaseOptions: ComboboxOption[]\n    tableOptions: ComboboxOption[]\n    onModelChange: (modelId: string) => void\n    onDbChange: (db: string) => void\n    onTableChange: (table: string) => void\n  }\n\n  let {\n    models,\n    selectedModelId,\n    selectedDb,\n    selectedTable,\n    databaseOptions,\n    tableOptions,\n    onModelChange,\n    onDbChange,\n    onTableChange,\n  }: Props = $props()\n\n  const modelOptions = $derived.by<ComboboxOption[]>(() =>\n    models.map(m => ({\n      value: m.id,\n      label: `${m.display_name || m.name}`,\n      hint: `${m.provider_name} · ${m.provider_kind}`,\n      keywords: `${m.name} ${m.display_name || ''} ${m.provider_name} ${m.provider_kind}`,\n    }))\n  )\n</script>\n\n<div class=\"border-b border-gray-200 dark:border-gray-800 px-4 py-2.5 flex items-center gap-3\">\n  <Brain size={18} class=\"text-ch-blue shrink-0\" />\n  <h1 class=\"text-lg font-semibold text-gray-900 dark:text-gray-100 shrink-0\">Brain</h1>\n\n  <!-- Context controls -->\n  <div class=\"flex items-center gap-2 ml-2\">\n    <div class=\"w-40\">\n      <Combobox\n        options={databaseOptions}\n        value={selectedDb}\n        placeholder=\"+ Database...\"\n        onChange={(v) => onDbChange(v)}\n      />\n    </div>\n\n    {#if selectedDb}\n      <div class=\"w-40\">\n        <Combobox\n          options={tableOptions}\n          value={selectedTable}\n          placeholder=\"+ Table...\"\n          onChange={(v) => onTableChange(v)}\n        />\n      </div>\n    {/if}\n  </div>\n\n  <div class=\"ml-auto w-72 max-w-[35%] shrink-0\">\n    
<Combobox\n      options={modelOptions}\n      value={selectedModelId}\n      placeholder=\"Select model\"\n      onChange={(v) => onModelChange(v)}\n    />\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainInput.svelte",
    "content": "<script lang=\"ts\">\n  import Button from '../common/Button.svelte'\n  import { Send, Database as DbIcon, X } from 'lucide-svelte'\n  import BrainMentionDropdown from './BrainMentionDropdown.svelte'\n  import type { SchemaContextEntry } from '../../types/brain'\n\n  interface Props {\n    value: string\n    streaming: boolean\n    contexts: SchemaContextEntry[]\n    onSend: () => void\n    onInput: (value: string) => void\n    onAddContext: (database: string, table: string) => void\n    onRemoveContext: (database: string, table: string) => void\n    onClearAllContexts?: () => void\n  }\n\n  let { value, streaming, contexts, onSend, onInput, onAddContext, onRemoveContext, onClearAllContexts }: Props = $props()\n\n  let textareaEl: HTMLTextAreaElement | undefined = $state()\n  let mentionActive = $state(false)\n  let mentionQuery = $state('')\n  let dropdownRef: { handleKeydown: (e: KeyboardEvent) => boolean } | undefined = $state()\n\n  function handleInput(e: Event) {\n    const target = e.target as HTMLTextAreaElement\n    onInput(target.value)\n    target.style.height = 'auto'\n    target.style.height = `${Math.min(200, target.scrollHeight)}px`\n    detectMention(target.value, target.selectionStart)\n  }\n\n  function detectMention(text: string, cursorPos: number) {\n    // Look backwards from cursor for @ preceded by whitespace/start\n    const before = text.slice(0, cursorPos)\n    const match = before.match(/(^|[\\s])@([^\\s]*)$/)\n    if (match) {\n      mentionActive = true\n      mentionQuery = match[2]\n    } else {\n      mentionActive = false\n      mentionQuery = ''\n    }\n  }\n\n  function handleMentionSelect(database: string, table: string) {\n    // Remove the @query text from input\n    if (textareaEl) {\n      const cursorPos = textareaEl.selectionStart\n      const before = value.slice(0, cursorPos)\n      const after = value.slice(cursorPos)\n      const match = before.match(/(^|[\\s])@([^\\s]*)$/)\n      if (match) {\n        
const start = before.length - match[0].length + (match[1] ? match[1].length : 0)\n        const newValue = before.slice(0, start) + after\n        onInput(newValue.trimStart() === '' ? '' : newValue)\n      }\n    }\n    mentionActive = false\n    mentionQuery = ''\n    onAddContext(database, table)\n    textareaEl?.focus()\n  }\n\n  function handleKeydown(e: KeyboardEvent) {\n    // When mention dropdown is active, route keyboard events there\n    if (mentionActive && dropdownRef) {\n      const handled = dropdownRef.handleKeydown(e)\n      if (handled) return\n    }\n\n    if (e.key === 'Enter' && !e.shiftKey) {\n      e.preventDefault()\n      onSend()\n      if (textareaEl) textareaEl.style.height = 'auto'\n    }\n  }\n</script>\n\n<div class=\"border-t border-gray-200 dark:border-gray-800 p-4\">\n  {#if contexts.length > 0}\n    <div class=\"mb-2 flex flex-wrap items-center gap-1\">\n      {#each contexts as ctx (`${ctx.database}.${ctx.table}`)}\n        <span class=\"inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[10px] font-medium bg-ch-blue/10 text-ch-blue\">\n          <DbIcon size={10} />\n          {ctx.database}.{ctx.table}\n          <button\n            class=\"ml-0.5 rounded-full p-0.5 hover:bg-ch-blue/20 transition-colors\"\n            onclick={() => onRemoveContext(ctx.database, ctx.table)}\n            title=\"Remove context\"\n          >\n            <X size={10} />\n          </button>\n        </span>\n      {/each}\n      {#if contexts.length > 1 && onClearAllContexts}\n        <button\n          class=\"text-[10px] text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 ml-1 transition-colors\"\n          onclick={onClearAllContexts}\n        >\n          Clear all\n        </button>\n      {/if}\n    </div>\n  {/if}\n  <div class=\"relative flex items-end gap-2\">\n    {#if mentionActive}\n      <BrainMentionDropdown\n        query={mentionQuery}\n        onSelect={handleMentionSelect}\n        onDismiss={() => { 
mentionActive = false; mentionQuery = '' }}\n        bind:this={dropdownRef}\n      />\n    {/if}\n    <textarea\n      class=\"flex-1 text-sm bg-transparent border border-gray-300 dark:border-gray-700 rounded-lg px-3 py-2 text-gray-800 dark:text-gray-200 resize-none min-h-[44px] max-h-[200px] focus:outline-none focus:ring-2 focus:ring-ch-blue/40 focus:border-ch-blue/50 transition-colors\"\n      placeholder=\"Ask Brain about your data... (type @ to add table context)\"\n      {value}\n      oninput={handleInput}\n      onkeydown={handleKeydown}\n      disabled={streaming}\n      bind:this={textareaEl}\n    ></textarea>\n    <Button size=\"sm\" onclick={onSend} loading={streaming} disabled={!value.trim() || streaming}>\n      <Send size={14} />\n    </Button>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainMentionDropdown.svelte",
    "content": "<script lang=\"ts\">\n  import { getDatabases, loadTables } from '../../stores/schema.svelte'\n  import { Database as DbIcon, Loader2 } from 'lucide-svelte'\n\n  interface Props {\n    query: string\n    onSelect: (database: string, table: string) => void\n    onDismiss: () => void\n  }\n\n  let { query, onSelect, onDismiss }: Props = $props()\n\n  let selectedIndex = $state(0)\n  let loadingDb = $state<string | null>(null)\n\n  interface MentionOption {\n    database: string\n    table: string\n    label: string\n    loadTables?: boolean\n  }\n\n  const filteredOptions = $derived.by<MentionOption[]>(() => {\n    const dbs = getDatabases()\n    const q = query.toLowerCase()\n    const results: MentionOption[] = []\n\n    for (const db of dbs) {\n      if (!db.tables || db.tables.length === 0) {\n        // Database without tables loaded — show a \"load\" option\n        const label = `${db.name}.* (load tables...)`\n        if (!q || label.toLowerCase().includes(q) || db.name.toLowerCase().includes(q)) {\n          results.push({ database: db.name, table: '', label, loadTables: true })\n        }\n        continue\n      }\n      for (const table of db.tables) {\n        const label = `${db.name}.${table.name}`\n        if (!q || label.toLowerCase().includes(q)) {\n          results.push({ database: db.name, table: table.name, label })\n        }\n        if (results.length >= 50) break\n      }\n      if (results.length >= 50) break\n    }\n    return results\n  })\n\n  // Reset selected index when options change\n  $effect(() => {\n    filteredOptions // track\n    selectedIndex = 0\n  })\n\n  async function handleSelect(opt: MentionOption) {\n    if (opt.loadTables) {\n      loadingDb = opt.database\n      await loadTables(opt.database)\n      loadingDb = null\n      return\n    }\n    onSelect(opt.database, opt.table)\n  }\n\n  export function handleKeydown(e: KeyboardEvent): boolean {\n    if (filteredOptions.length === 0) return false\n\n    
if (e.key === 'ArrowDown') {\n      e.preventDefault()\n      selectedIndex = (selectedIndex + 1) % filteredOptions.length\n      return true\n    }\n    if (e.key === 'ArrowUp') {\n      e.preventDefault()\n      selectedIndex = (selectedIndex - 1 + filteredOptions.length) % filteredOptions.length\n      return true\n    }\n    if (e.key === 'Enter') {\n      e.preventDefault()\n      const opt = filteredOptions[selectedIndex]\n      if (opt) handleSelect(opt)\n      return true\n    }\n    if (e.key === 'Escape') {\n      e.preventDefault()\n      onDismiss()\n      return true\n    }\n    return false\n  }\n</script>\n\n{#if filteredOptions.length > 0}\n  <div class=\"absolute bottom-full left-0 right-0 mb-1 max-h-60 overflow-auto rounded-lg border border-gray-200 dark:border-gray-700 bg-white dark:bg-gray-900 shadow-lg z-50\">\n    {#each filteredOptions as opt, i (opt.label)}\n      <button\n        class=\"w-full text-left px-3 py-1.5 text-sm flex items-center gap-2 transition-colors\n          {i === selectedIndex ? 'bg-ch-blue/10 text-ch-blue' : 'text-gray-700 dark:text-gray-300 hover:bg-gray-100 dark:hover:bg-gray-800'}\"\n        onmouseenter={() => selectedIndex = i}\n        onclick={() => handleSelect(opt)}\n      >\n        {#if opt.loadTables && loadingDb === opt.database}\n          <Loader2 size={14} class=\"animate-spin shrink-0\" />\n        {:else}\n          <DbIcon size={14} class=\"shrink-0 opacity-50\" />\n        {/if}\n        <span class=\"truncate\">{opt.label}</span>\n      </button>\n    {/each}\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainMessage.svelte",
    "content": "<script lang=\"ts\">\n  import type { BrainArtifact, BrainMessage as BrainMessageType } from '../../types/brain'\n  import { parseMessageSegments } from './brain-markdown'\n  import BrainSqlBlock from './BrainSqlBlock.svelte'\n  import BrainArtifactCard from './BrainArtifactCard.svelte'\n  import Spinner from '../common/Spinner.svelte'\n\n  interface Props {\n    message: BrainMessageType\n    artifacts: BrainArtifact[]\n    streaming: boolean\n    isLastMessage: boolean\n    runningSql: string | null\n    onRunSql: (sql: string, messageId?: string) => void\n    onOpenInEditor: (sql: string) => void\n  }\n\n  let { message, artifacts, streaming, isLastMessage, runningSql, onRunSql, onOpenInEditor }: Props = $props()\n\n  const segments = $derived(\n    message.role === 'assistant' && message.content\n      ? parseMessageSegments(message.content)\n      : []\n  )\n\n  /** Find artifact whose query matches this sql block */\n  function findArtifactForSql(sql: string): BrainArtifact | undefined {\n    return artifacts.find(art => {\n      if (art.type !== 'query_result') return false\n      try {\n        const payload = JSON.parse(art.content)\n        return payload?.query?.trim() === sql.trim()\n      } catch {\n        return false\n      }\n    })\n  }\n\n  /** Get artifacts not associated with any specific SQL block */\n  const orphanArtifacts = $derived.by(() => {\n    if (message.role !== 'assistant') return []\n    const sqlTexts = new Set(\n      segments.filter(s => s.type === 'sql').map(s => s.content.trim())\n    )\n    return artifacts.filter(art => {\n      if (art.type !== 'query_result') return true\n      try {\n        const payload = JSON.parse(art.content)\n        return !sqlTexts.has(payload?.query?.trim())\n      } catch {\n        return true\n      }\n    })\n  })\n</script>\n\n{#if message.role === 'user'}\n  <div class=\"flex justify-end\">\n    <div class=\"max-w-[80%] bg-ch-blue text-white rounded-2xl rounded-br-sm px-4 
py-2.5 text-sm whitespace-pre-wrap\">{message.content}</div>\n  </div>\n{:else}\n  <div class=\"flex justify-start\">\n    <div class=\"max-w-[85%] min-w-0\">\n      {#if message.content}\n        {#each segments as seg}\n          {#if seg.type === 'markdown'}\n            <div class=\"prose-brain text-sm text-gray-800 dark:text-gray-200\">{@html seg.html}</div>\n          {:else if seg.type === 'sql'}\n            <BrainSqlBlock\n              sql={seg.content}\n              messageId={message.id}\n              artifact={findArtifactForSql(seg.content)}\n              running={runningSql === seg.content}\n              onRun={onRunSql}\n              onOpenInEditor={onOpenInEditor}\n            />\n          {/if}\n        {/each}\n\n        {#if orphanArtifacts.length > 0}\n          {#each orphanArtifacts as art}\n            <BrainArtifactCard artifact={art} />\n          {/each}\n        {/if}\n      {:else if streaming && isLastMessage}\n        <div class=\"bg-gray-100 dark:bg-gray-800 rounded-2xl rounded-bl-sm px-4 py-3\">\n          <Spinner size=\"sm\" />\n        </div>\n      {/if}\n    </div>\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainSidebar.svelte",
    "content": "<script lang=\"ts\">\n  import type { BrainChat } from '../../types/brain'\n  import Spinner from '../common/Spinner.svelte'\n  import { Plus, Search, Edit3, Trash2 } from 'lucide-svelte'\n\n  interface Props {\n    chats: BrainChat[]\n    selectedChatId: string\n    loading: boolean\n    onSelectChat: (chatId: string) => void\n    onCreateChat: () => void\n    onRenameChat: (chat: BrainChat) => void\n    onDeleteChat: (chat: BrainChat) => void\n  }\n\n  let { chats, selectedChatId, loading, onSelectChat, onCreateChat, onRenameChat, onDeleteChat }: Props = $props()\n\n  let search = $state('')\n\n  const filtered = $derived.by(() => {\n    const term = search.trim().toLowerCase()\n    if (!term) return chats\n    return chats.filter(c => c.title.toLowerCase().includes(term))\n  })\n\n  function formatTime(ts?: string | null): string {\n    if (!ts) return ''\n    try {\n      const d = new Date(ts)\n      const now = new Date()\n      const diff = now.getTime() - d.getTime()\n      if (diff < 60_000) return 'just now'\n      if (diff < 3_600_000) return `${Math.floor(diff / 60_000)}m ago`\n      if (diff < 86_400_000) return `${Math.floor(diff / 3_600_000)}h ago`\n      if (diff < 604_800_000) return `${Math.floor(diff / 86_400_000)}d ago`\n      return d.toLocaleDateString()\n    } catch {\n      return ts\n    }\n  }\n</script>\n\n<aside class=\"w-72 border-r border-gray-200 dark:border-gray-800 flex flex-col\">\n  <div class=\"p-3 border-b border-gray-200 dark:border-gray-800\">\n    <button\n      class=\"w-full inline-flex items-center justify-center gap-1.5 px-3 py-1.5 text-xs font-medium rounded-lg bg-ch-blue text-white hover:bg-orange-500 transition-colors\"\n      onclick={onCreateChat}\n    >\n      <Plus size={14} />\n      <span>New Chat</span>\n    </button>\n    <div class=\"mt-2 flex items-center gap-2 border border-gray-300 dark:border-gray-700 rounded-lg px-2 py-1.5\">\n      <Search size={13} class=\"text-gray-400 shrink-0\" />\n   
   <input class=\"w-full bg-transparent text-xs text-gray-800 dark:text-gray-200 placeholder:text-gray-400 outline-none\" placeholder=\"Search chats\" bind:value={search} />\n    </div>\n  </div>\n\n  <div class=\"flex-1 overflow-auto p-2 space-y-0.5\">\n    {#if loading}\n      <div class=\"flex items-center justify-center py-6\"><Spinner size=\"sm\" /></div>\n    {:else if filtered.length === 0}\n      <p class=\"text-xs text-gray-500 px-2 py-3\">No chats</p>\n    {:else}\n      {#each filtered as chat (chat.id)}\n        <div\n          class=\"group w-full text-left rounded-lg px-3 py-2.5 cursor-pointer transition-colors\n            {selectedChatId === chat.id\n              ? 'bg-ch-blue/10 border-l-2 border-ch-blue'\n              : 'hover:bg-gray-100 dark:hover:bg-gray-800 border-l-2 border-transparent'}\"\n          onclick={() => onSelectChat(chat.id)}\n          onkeydown={(e) => (e.key === 'Enter' || e.key === ' ') && onSelectChat(chat.id)}\n          role=\"button\"\n          tabindex=\"0\"\n        >\n          <div class=\"text-sm text-gray-800 dark:text-gray-200 truncate font-medium\">{chat.title}</div>\n          <div class=\"flex items-center justify-between mt-1\">\n            <span class=\"text-[11px] text-gray-500\">{formatTime(chat.last_message_at ?? 
chat.updated_at)}</span>\n            <div class=\"flex items-center gap-1 opacity-0 group-hover:opacity-100 transition-opacity\">\n              <button\n                class=\"p-0.5 text-gray-400 hover:text-ch-blue rounded transition-colors\"\n                onclick={(e) => { e.stopPropagation(); onRenameChat(chat) }}\n                title=\"Rename\"\n              >\n                <Edit3 size={12} />\n              </button>\n              <button\n                class=\"p-0.5 text-gray-400 hover:text-red-500 rounded transition-colors\"\n                onclick={(e) => { e.stopPropagation(); onDeleteChat(chat) }}\n                title=\"Delete\"\n              >\n                <Trash2 size={12} />\n              </button>\n            </div>\n          </div>\n        </div>\n      {/each}\n    {/if}\n  </div>\n</aside>\n"
  },
  {
    "path": "ui/src/lib/components/brain/BrainSqlBlock.svelte",
    "content": "<script lang=\"ts\">\n  import type { BrainArtifact } from '../../types/brain'\n  import { copyToClipboard } from '../../utils/export'\n  import { success as toastSuccess } from '../../stores/toast.svelte'\n  import { highlightSQL } from './brain-markdown'\n  import BrainArtifactCard from './BrainArtifactCard.svelte'\n  import { Copy, ExternalLink, Play } from 'lucide-svelte'\n  import Spinner from '../common/Spinner.svelte'\n\n  interface Props {\n    sql: string\n    messageId?: string\n    artifact?: BrainArtifact | null\n    running?: boolean\n    onRun: (sql: string, messageId?: string) => void\n    onOpenInEditor: (sql: string) => void\n  }\n\n  let { sql, messageId, artifact = null, running = false, onRun, onOpenInEditor }: Props = $props()\n\n  async function handleCopy() {\n    await copyToClipboard(sql)\n    toastSuccess('Copied to clipboard')\n  }\n</script>\n\n<div class=\"my-3 rounded-lg border border-gray-200 dark:border-gray-700 overflow-hidden\">\n  <div class=\"flex items-center justify-between px-3 py-1.5 bg-gray-100/80 dark:bg-gray-800/60 border-b border-gray-200 dark:border-gray-700\">\n    <span class=\"ds-badge ds-badge-neutral text-[10px]\">SQL</span>\n    <div class=\"flex items-center gap-1\">\n      <button\n        class=\"inline-flex items-center gap-1 px-1.5 py-0.5 text-[11px] text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 rounded hover:bg-gray-200/70 dark:hover:bg-gray-700/70 transition-colors\"\n        title=\"Copy\"\n        onclick={handleCopy}\n      >\n        <Copy size={12} />\n      </button>\n      <button\n        class=\"inline-flex items-center gap-1 px-1.5 py-0.5 text-[11px] text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 rounded hover:bg-gray-200/70 dark:hover:bg-gray-700/70 transition-colors\"\n        title=\"Open in Editor\"\n        onclick={() => onOpenInEditor(sql)}\n      >\n        <ExternalLink size={12} />\n        <span>Editor</span>\n      </button>\n      <button\n      
  class=\"inline-flex items-center gap-1 px-2 py-0.5 text-[11px] font-medium text-white bg-ch-blue hover:bg-orange-500 rounded transition-colors disabled:opacity-50\"\n        onclick={() => onRun(sql, messageId)}\n        disabled={running}\n      >\n        {#if running}\n          <Spinner size=\"sm\" />\n        {:else}\n          <Play size={12} />\n        {/if}\n        <span>Run</span>\n      </button>\n    </div>\n  </div>\n  <pre class=\"p-3 text-xs font-mono overflow-x-auto bg-gray-50/80 dark:bg-gray-900/50 leading-relaxed\"><code>{@html highlightSQL(sql)}</code></pre>\n</div>\n{#if artifact}\n  <BrainArtifactCard {artifact} />\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/brain/brain-markdown.ts",
    "content": "import { Marked } from 'marked'\n\nexport interface MessageSegment {\n  type: 'markdown' | 'sql'\n  content: string\n  html?: string\n}\n\nconst SQL_FENCE_RE = /```sql\\n([\\s\\S]*?)```/g\n\nconst marked = new Marked({\n  breaks: true,\n  gfm: true,\n})\n\n/** Render a markdown string to HTML using marked. */\nexport function renderMarkdown(content: string): string {\n  return marked.parse(content) as string\n}\n\n/**\n * Split assistant message content into alternating markdown and sql segments.\n * SQL segments are extracted from ```sql fences so they can be rendered\n * as interactive Svelte components instead of static HTML.\n */\nexport function parseMessageSegments(content: string): MessageSegment[] {\n  const segments: MessageSegment[] = []\n  let lastIndex = 0\n\n  for (const match of content.matchAll(SQL_FENCE_RE)) {\n    const matchStart = match.index!\n    // Markdown text before this sql block\n    if (matchStart > lastIndex) {\n      const md = content.slice(lastIndex, matchStart)\n      segments.push({ type: 'markdown', content: md, html: renderMarkdown(md) })\n    }\n    // The sql block itself\n    segments.push({ type: 'sql', content: match[1].trim() })\n    lastIndex = matchStart + match[0].length\n  }\n\n  // Trailing markdown after the last sql block\n  if (lastIndex < content.length) {\n    const md = content.slice(lastIndex)\n    segments.push({ type: 'markdown', content: md, html: renderMarkdown(md) })\n  }\n\n  return segments\n}\n\n/** Extract raw SQL strings from ```sql fences. 
*/\nexport function extractSqlBlocks(content: string): string[] {\n  const blocks: string[] = []\n  for (const match of content.matchAll(SQL_FENCE_RE)) {\n    blocks.push(match[1].trim())\n  }\n  return blocks\n}\n\nconst SQL_KEYWORDS = new Set([\n  'SELECT','FROM','WHERE','JOIN','LEFT','RIGHT','INNER','OUTER','CROSS','FULL',\n  'ON','AND','OR','NOT','IN','IS','NULL','LIKE','BETWEEN','EXISTS',\n  'GROUP','BY','ORDER','ASC','DESC','LIMIT','OFFSET','HAVING',\n  'INSERT','INTO','VALUES','UPDATE','SET','DELETE','CREATE','ALTER','DROP',\n  'TABLE','INDEX','VIEW','AS','WITH','UNION','ALL','DISTINCT','CASE','WHEN',\n  'THEN','ELSE','END','CAST','IF','ARRAY','MAP','TUPLE',\n  'FORMAT','USING','ENGINE','PARTITION','SAMPLE','PREWHERE','GLOBAL',\n  'ANY','ANTI','SEMI','MATERIALIZED','FINAL','SETTINGS',\n  'TRUE','FALSE','COUNT','SUM','AVG','MIN','MAX','UNIQ',\n])\n\n/** Lightweight SQL syntax highlighting — returns HTML with colored spans. */\nexport function highlightSQL(sql: string): string {\n  return sql.replace(\n    /('(?:[^'\\\\]|\\\\.)*')|(\"(?:[^\"\\\\]|\\\\.)*\")|(--[^\\n]*)|(\\b\\d+(?:\\.\\d+)?\\b)|(\\b[A-Za-z_]\\w*\\b)/g,\n    (match, singleStr: string, doubleStr: string, comment: string, num: string, word: string) => {\n      if (singleStr || doubleStr)\n        return `<span class=\"text-green-600 dark:text-green-400\">${escapeHtml(match)}</span>`\n      if (comment)\n        return `<span class=\"text-gray-400 italic\">${escapeHtml(match)}</span>`\n      if (num)\n        return `<span class=\"text-amber-600 dark:text-amber-400\">${escapeHtml(match)}</span>`\n      if (word && SQL_KEYWORDS.has(word.toUpperCase()))\n        return `<span class=\"text-blue-500 dark:text-blue-400 font-semibold\">${escapeHtml(match)}</span>`\n      return escapeHtml(match)\n    },\n  )\n}\n\nfunction escapeHtml(s: string): string {\n  return s.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\"/g, '&quot;')\n}\n"
  },
  {
    "path": "ui/src/lib/components/common/Button.svelte",
    "content": "<script lang=\"ts\">\n  import type { Snippet } from 'svelte'\n\n  interface Props {\n    variant?: 'primary' | 'secondary' | 'ghost' | 'danger'\n    size?: 'sm' | 'md' | 'lg'\n    disabled?: boolean\n    loading?: boolean\n    type?: 'button' | 'submit'\n    onclick?: (e: MouseEvent) => void\n    children: Snippet\n  }\n\n  let { variant = 'primary', size = 'md', disabled = false, loading = false, type = 'button', onclick, children }: Props = $props()\n\n  const base = 'inline-flex items-center justify-center font-semibold rounded-lg transition-all duration-150 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-offset-transparent disabled:opacity-50 disabled:cursor-not-allowed'\n\n  const variants: Record<string, string> = {\n    primary: 'bg-ch-blue text-white border border-orange-500 hover:bg-orange-500 focus:ring-orange-400 shadow-[0_1px_0_rgba(255,255,255,0.08)_inset]',\n    secondary: 'bg-gray-100 dark:bg-gray-800 text-gray-800 dark:text-gray-200 hover:bg-gray-200 dark:hover:bg-gray-700 focus:ring-gray-400 dark:focus:ring-gray-600 border border-gray-300 dark:border-gray-700',\n    ghost: 'text-gray-600 dark:text-gray-300 hover:text-gray-900 dark:hover:text-gray-100 hover:bg-gray-200/70 dark:hover:bg-gray-800/70 focus:ring-gray-400 dark:focus:ring-gray-600 border border-transparent',\n    danger: 'bg-red-600 text-white hover:bg-red-700 focus:ring-red-500 border border-red-700',\n  }\n\n  const sizes: Record<string, string> = {\n    sm: 'px-2.5 py-1.5 text-xs gap-1.5',\n    md: 'px-3.5 py-2 text-sm gap-2',\n    lg: 'px-5 py-2.5 text-base gap-2.5',\n  }\n</script>\n\n<button\n  {type}\n  class=\"{base} {variants[variant]} {sizes[size]}\"\n  disabled={disabled || loading}\n  {onclick}\n>\n  {#if loading}\n    <svg class=\"animate-spin h-4 w-4\" viewBox=\"0 0 24 24\" fill=\"none\">\n      <circle class=\"opacity-25\" cx=\"12\" cy=\"12\" r=\"10\" stroke=\"currentColor\" stroke-width=\"4\" />\n      <path class=\"opacity-75\" 
fill=\"currentColor\" d=\"M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z\" />\n    </svg>\n  {/if}\n  {@render children()}\n</button>\n"
  },
  {
    "path": "ui/src/lib/components/common/Combobox.svelte",
    "content": "<script lang=\"ts\">\n  import { tick } from 'svelte'\n  import { Search, ChevronDown, Check } from 'lucide-svelte'\n\n  export interface ComboboxOption {\n    value: string\n    label: string\n    hint?: string\n    keywords?: string\n    disabled?: boolean\n  }\n\n  interface Props {\n    options: ComboboxOption[]\n    value?: string\n    placeholder?: string\n    emptyText?: string\n    disabled?: boolean\n    onChange?: (value: string) => void\n  }\n\n  let {\n    options,\n    value = '',\n    placeholder = 'Select...',\n    emptyText = 'No matches',\n    disabled = false,\n    onChange,\n  }: Props = $props()\n\n  let open = $state(false)\n  let query = $state('')\n  let highlighted = $state(0)\n  let inputEl: HTMLInputElement | undefined = $state()\n  let rootEl: HTMLDivElement | undefined = $state()\n  let openUpward = $state(false)\n\n  const selected = $derived(options.find((o) => o.value === value))\n\n  const filtered = $derived.by(() => {\n    const term = query.trim().toLowerCase()\n    if (!term) return options\n    return options.filter((o) => {\n      const pool = `${o.label} ${o.hint ?? ''} ${o.keywords ?? 
''}`.toLowerCase()\n      return pool.includes(term)\n    })\n  })\n\n  async function openMenu(e?: MouseEvent) {\n    if (disabled) return\n    const rect = (e?.currentTarget as HTMLElement | null)?.getBoundingClientRect?.()\n    if (rect) {\n      const estimatedMenuHeight = 280\n      const spaceBelow = window.innerHeight - rect.bottom\n      const spaceAbove = rect.top\n      openUpward = spaceBelow < estimatedMenuHeight && spaceAbove > spaceBelow\n    } else {\n      openUpward = false\n    }\n    open = true\n    query = ''\n    highlighted = Math.max(0, filtered.findIndex((o) => o.value === value))\n    await tick()\n    inputEl?.focus()\n  }\n\n  function closeMenu() {\n    open = false\n    query = ''\n    highlighted = 0\n  }\n\n  function selectOption(opt: ComboboxOption) {\n    if (opt.disabled) return\n    closeMenu()\n    onChange?.(opt.value)\n  }\n\n  function onKeydown(e: KeyboardEvent) {\n    if (!open) return\n    if (e.key === 'Escape') {\n      e.preventDefault()\n      closeMenu()\n      return\n    }\n    if (e.key === 'ArrowDown') {\n      e.preventDefault()\n      highlighted = Math.min(filtered.length - 1, highlighted + 1)\n      return\n    }\n    if (e.key === 'ArrowUp') {\n      e.preventDefault()\n      highlighted = Math.max(0, highlighted - 1)\n      return\n    }\n    if (e.key === 'Enter' && filtered[highlighted]) {\n      e.preventDefault()\n      selectOption(filtered[highlighted])\n    }\n  }\n\n  function onWindowPointerDown(e: PointerEvent) {\n    if (!open) return\n    const target = e.target as Node | null\n    if (!target) return\n    if (rootEl?.contains(target)) return\n    closeMenu()\n  }\n\n</script>\n\n<svelte:window onkeydown={onKeydown} onpointerdown={onWindowPointerDown} />\n\n<div class=\"relative\" bind:this={rootEl}>\n  <button\n    type=\"button\"\n    class=\"ds-input flex items-center gap-2 text-left\"\n    onclick={open ? 
closeMenu : openMenu}\n    disabled={disabled}\n  >\n    <span class=\"flex-1 truncate {selected ? '' : 'text-gray-400 dark:text-gray-500'}\">\n      {selected?.label ?? placeholder}\n    </span>\n    <ChevronDown size={14} class=\"text-gray-500 {open ? 'rotate-180' : ''} transition-transform\" />\n  </button>\n\n  {#if open}\n    <div\n      class=\"fixed inset-0 z-[65]\"\n      role=\"button\"\n      tabindex=\"-1\"\n      onclick={(e) => { e.preventDefault(); closeMenu() }}\n      onkeydown={(e) => (e.key === 'Escape' || e.key === 'Enter') && closeMenu()}\n    ></div>\n    <div class=\"absolute z-[66] w-full min-w-[220px] rounded-xl surface-card overflow-hidden shadow-2xl {openUpward ? 'bottom-full mb-1' : 'mt-1'}\">\n      <div class=\"flex items-center gap-2 px-2.5 py-2 border-b border-gray-200/70 dark:border-gray-800/70\">\n        <Search size={13} class=\"text-gray-500\" />\n        <input\n          bind:this={inputEl}\n          bind:value={query}\n          class=\"w-full bg-transparent text-sm text-gray-800 dark:text-gray-200 placeholder:text-gray-400 outline-none\"\n          type=\"text\"\n          placeholder=\"Type to search\"\n        />\n      </div>\n\n      <div class=\"max-h-56 overflow-y-auto p-1.5\">\n        {#if filtered.length === 0}\n          <div class=\"px-2.5 py-8 text-center text-xs text-gray-500\">{emptyText}</div>\n        {:else}\n          {#each filtered as opt, idx (opt.value)}\n            <button\n              type=\"button\"\n              class=\"w-full flex items-center gap-2 px-2.5 py-2 rounded-lg text-left transition-colors {idx === highlighted ? 
'bg-ch-blue/10 text-ch-blue' : 'text-gray-700 dark:text-gray-300 hover:bg-gray-200/55 dark:hover:bg-gray-800/55'}\"\n              onclick={(e) => { e.preventDefault(); selectOption(opt) }}\n              onmouseenter={() => highlighted = idx}\n              disabled={opt.disabled}\n            >\n              <span class=\"flex-1 min-w-0\">\n                <span class=\"block text-sm truncate\">{opt.label}</span>\n                {#if opt.hint}\n                  <span class=\"block text-[11px] text-gray-500 dark:text-gray-400 truncate\">{opt.hint}</span>\n                {/if}\n              </span>\n              {#if value === opt.value}\n                <Check size={13} class=\"text-ch-blue\" />\n              {/if}\n            </button>\n          {/each}\n        {/if}\n      </div>\n    </div>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/common/ConfirmDialog.svelte",
    "content": "<script lang=\"ts\">\n  import Modal from './Modal.svelte'\n  import Button from './Button.svelte'\n\n  interface Props {\n    open: boolean\n    title: string\n    description?: string\n    confirmLabel?: string\n    cancelLabel?: string\n    loading?: boolean\n    destructive?: boolean\n    onconfirm: () => void\n    oncancel: () => void\n  }\n\n  let {\n    open,\n    title,\n    description = '',\n    confirmLabel = 'Confirm',\n    cancelLabel = 'Cancel',\n    loading = false,\n    destructive = false,\n    onconfirm,\n    oncancel,\n  }: Props = $props()\n</script>\n\n<Modal {open} title={title} onclose={oncancel}>\n  <div class=\"space-y-4\">\n    {#if description}\n      <p class=\"text-sm text-gray-600 dark:text-gray-300\">{description}</p>\n    {/if}\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <Button size=\"sm\" variant=\"secondary\" onclick={oncancel} disabled={loading}>{cancelLabel}</Button>\n      <Button size=\"sm\" variant={destructive ? 'danger' : 'primary'} onclick={onconfirm} {loading}>{confirmLabel}</Button>\n    </div>\n  </div>\n</Modal>\n"
  },
  {
    "path": "ui/src/lib/components/common/ContextMenu.svelte",
    "content": "<script lang=\"ts\">\n  import { tick } from 'svelte'\n\n  export interface ContextMenuItem {\n    id: string\n    label?: string\n    icon?: any\n    shortcut?: string\n    disabled?: boolean\n    danger?: boolean\n    separator?: boolean\n    onSelect?: () => void\n  }\n\n  interface Props {\n    open?: boolean\n    x?: number\n    y?: number\n    items?: ContextMenuItem[]\n    onclose?: () => void\n  }\n\n  let {\n    open = false,\n    x = 0,\n    y = 0,\n    items = [],\n    onclose,\n  }: Props = $props()\n\n  let menuEl = $state<HTMLDivElement | null>(null)\n  let left = $state(0)\n  let top = $state(0)\n  let highlightedIndex = $state(-1)\n\n  function closeMenu() {\n    onclose?.()\n  }\n\n  function firstEnabledIndex(): number {\n    return items.findIndex((item) => !item.separator && !item.disabled)\n  }\n\n  function moveHighlight(direction: 1 | -1) {\n    if (!items.length) return\n    let idx = highlightedIndex\n    for (let i = 0; i < items.length; i += 1) {\n      idx = (idx + direction + items.length) % items.length\n      const item = items[idx]\n      if (!item.separator && !item.disabled) {\n        highlightedIndex = idx\n        return\n      }\n    }\n  }\n\n  function activateIndex(index: number) {\n    const item = items[index]\n    if (!item || item.separator || item.disabled) return\n    item.onSelect?.()\n    closeMenu()\n  }\n\n  function reposition() {\n    if (!menuEl) return\n    const rect = menuEl.getBoundingClientRect()\n    const pad = 8\n    left = Math.min(Math.max(x, pad), window.innerWidth - rect.width - pad)\n    top = Math.min(Math.max(y, pad), window.innerHeight - rect.height - pad)\n  }\n\n  function handleKeydown(e: KeyboardEvent) {\n    if (!open) return\n    if (e.key === 'Escape') {\n      e.preventDefault()\n      closeMenu()\n      return\n    }\n    if (e.key === 'ArrowDown') {\n      e.preventDefault()\n      moveHighlight(1)\n      return\n    }\n    if (e.key === 'ArrowUp') {\n      
e.preventDefault()\n      moveHighlight(-1)\n      return\n    }\n    if (e.key === 'Enter') {\n      e.preventDefault()\n      if (highlightedIndex >= 0) activateIndex(highlightedIndex)\n    }\n  }\n\n  $effect(() => {\n    if (!open) {\n      highlightedIndex = -1\n      return\n    }\n    left = x\n    top = y\n    tick().then(() => {\n      reposition()\n      highlightedIndex = firstEnabledIndex()\n    })\n  })\n</script>\n\n<svelte:window onkeydown={handleKeydown} onresize={reposition} />\n\n{#if open}\n  <div\n    class=\"fixed inset-0 z-[95]\"\n    role=\"button\"\n    tabindex=\"-1\"\n    onclick={closeMenu}\n    onkeydown={(e) => (e.key === 'Escape' || e.key === 'Enter') && closeMenu()}\n    oncontextmenu={(e) => {\n      e.preventDefault()\n      closeMenu()\n    }}\n  ></div>\n  <div\n    bind:this={menuEl}\n    class=\"fixed z-[96] min-w-[230px] max-w-[320px] rounded-xl border border-gray-200/80 dark:border-gray-700/80 bg-white/96 dark:bg-gray-900/96 backdrop-blur-xl shadow-[0_16px_40px_rgba(0,0,0,0.35)] py-1.5\"\n    style={`left:${left}px;top:${top}px`}\n    role=\"menu\"\n  >\n    {#each items as item, i (item.id)}\n      {#if item.separator}\n        <div class=\"my-1 h-px bg-gray-200/90 dark:bg-gray-800/90\"></div>\n      {:else}\n        {@const Icon = item.icon}\n        <button\n          class=\"group/menuitem flex w-full items-center justify-between gap-3 px-3 py-2 text-[13px] transition-colors\n            {item.disabled\n              ? 'text-gray-400 cursor-not-allowed'\n              : item.danger\n                ? (highlightedIndex === i ? 'bg-red-500/12 text-red-500 dark:text-red-400' : 'text-red-500 dark:text-red-400 hover:bg-red-500/10')\n                : (highlightedIndex === i ? 
'bg-ch-blue/12 text-gray-900 dark:text-gray-100' : 'text-gray-700 dark:text-gray-300 hover:bg-gray-200/70 dark:hover:bg-gray-800/70')}\"\n          role=\"menuitem\"\n          disabled={item.disabled}\n          onmouseenter={() => !item.disabled && (highlightedIndex = i)}\n          onclick={() => activateIndex(i)}\n        >\n          <span class=\"inline-flex min-w-0 items-center gap-2\">\n            {#if Icon}\n              <Icon size={13} class=\"shrink-0 opacity-85\" />\n            {/if}\n            <span class=\"truncate\">{item.label}</span>\n          </span>\n          {#if item.shortcut}\n            <kbd class=\"text-[11px] px-1.5 py-0.5 rounded border border-gray-300 dark:border-gray-700 text-gray-500 dark:text-gray-400 font-medium\">{item.shortcut}</kbd>\n          {/if}\n        </button>\n      {/if}\n    {/each}\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/common/HelpTip.svelte",
    "content": "<script lang=\"ts\">\n  import { onDestroy, tick } from 'svelte'\n  import { HelpCircle } from 'lucide-svelte'\n\n  interface Props {\n    text: string\n    side?: 'top' | 'bottom'\n  }\n\n  let { text, side = 'top' }: Props = $props()\n\n  let triggerEl = $state<HTMLButtonElement | null>(null)\n  let tooltipEl = $state<HTMLDivElement | null>(null)\n  let open = $state(false)\n  let x = $state(0)\n  let y = $state(0)\n  let placement = $state<'top' | 'bottom'>('top')\n  let rafId: number | null = null\n\n  const GAP = 10\n  const VIEWPORT_PADDING = 8\n\n  function clamp(value: number, min: number, max: number): number {\n    return Math.min(Math.max(value, min), max)\n  }\n\n  function clearPositionRaf() {\n    if (rafId !== null) {\n      cancelAnimationFrame(rafId)\n      rafId = null\n    }\n  }\n\n  function detachViewportListeners() {\n    window.removeEventListener('scroll', schedulePosition, true)\n    window.removeEventListener('resize', schedulePosition)\n  }\n\n  function attachViewportListeners() {\n    window.addEventListener('scroll', schedulePosition, true)\n    window.addEventListener('resize', schedulePosition)\n  }\n\n  async function positionTooltip() {\n    if (!open || !triggerEl || !tooltipEl) return\n    await tick()\n    if (!open || !triggerEl || !tooltipEl) return\n\n    const triggerRect = triggerEl.getBoundingClientRect()\n    const tooltipRect = tooltipEl.getBoundingClientRect()\n    const preferred = side\n\n    const topY = triggerRect.top - tooltipRect.height - GAP\n    const bottomY = triggerRect.bottom + GAP\n    const fitsTop = topY >= VIEWPORT_PADDING\n    const fitsBottom = bottomY + tooltipRect.height <= window.innerHeight - VIEWPORT_PADDING\n\n    let nextPlacement: 'top' | 'bottom' = preferred\n    if (preferred === 'top' && !fitsTop && fitsBottom) nextPlacement = 'bottom'\n    if (preferred === 'bottom' && !fitsBottom && fitsTop) nextPlacement = 'top'\n\n    let nextY = nextPlacement === 'top' ? 
topY : bottomY\n    nextY = clamp(nextY, VIEWPORT_PADDING, window.innerHeight - tooltipRect.height - VIEWPORT_PADDING)\n\n    let nextX = triggerRect.left + (triggerRect.width / 2) - (tooltipRect.width / 2)\n    nextX = clamp(nextX, VIEWPORT_PADDING, window.innerWidth - tooltipRect.width - VIEWPORT_PADDING)\n\n    placement = nextPlacement\n    x = Math.round(nextX)\n    y = Math.round(nextY)\n  }\n\n  function schedulePosition() {\n    clearPositionRaf()\n    rafId = requestAnimationFrame(() => {\n      void positionTooltip()\n    })\n  }\n\n  function openTip() {\n    placement = side\n    open = true\n    attachViewportListeners()\n    schedulePosition()\n  }\n\n  function closeTip() {\n    open = false\n    detachViewportListeners()\n    clearPositionRaf()\n  }\n\n  function handleKeydown(event: KeyboardEvent) {\n    if (event.key === 'Escape') closeTip()\n  }\n\n  onDestroy(() => {\n    detachViewportListeners()\n    clearPositionRaf()\n  })\n</script>\n\n<span class=\"inline-flex items-center align-middle\">\n  <button\n    bind:this={triggerEl}\n    type=\"button\"\n    class=\"inline-flex h-4 w-4 items-center justify-center rounded-full text-gray-400 hover:text-ch-blue focus:outline-none focus:ring-2 focus:ring-ch-blue/35\"\n    aria-label=\"Help\"\n    onmouseenter={openTip}\n    onmouseleave={closeTip}\n    onfocus={openTip}\n    onblur={closeTip}\n    onkeydown={handleKeydown}\n  >\n    <HelpCircle size={13} />\n  </button>\n</span>\n\n{#if open}\n  <div\n    bind:this={tooltipEl}\n    class=\"pointer-events-none fixed z-[260] w-[min(22rem,calc(100vw-16px))] rounded-md border border-gray-200 dark:border-gray-700 bg-gray-50/98 dark:bg-gray-900/98 px-2.5 py-2 text-[11px] leading-relaxed text-gray-600 dark:text-gray-300 shadow-2xl\"\n    style={`left:${x}px;top:${y}px;`}\n    role=\"tooltip\"\n  >\n    {text}\n    <span\n      class={`absolute left-1/2 h-2 w-2 -translate-x-1/2 rotate-45 border-gray-200 dark:border-gray-700 bg-gray-50 dark:bg-gray-900 ${\n   
     placement === 'top'\n          ? '-bottom-1 border-r border-b'\n          : '-top-1 border-l border-t'\n      }`}\n    ></span>\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/common/InputDialog.svelte",
    "content": "<script lang=\"ts\">\n  import Modal from './Modal.svelte'\n  import Button from './Button.svelte'\n\n  interface Props {\n    open: boolean\n    title: string\n    description?: string\n    placeholder?: string\n    value: string\n    confirmLabel?: string\n    cancelLabel?: string\n    loading?: boolean\n    onconfirm: (value: string) => void\n    oncancel: () => void\n  }\n\n  let {\n    open,\n    title,\n    description = '',\n    placeholder = '',\n    value = $bindable(''),\n    confirmLabel = 'Save',\n    cancelLabel = 'Cancel',\n    loading = false,\n    onconfirm,\n    oncancel,\n  }: Props = $props()\n\n  let inputEl: HTMLInputElement | undefined = $state()\n\n  $effect(() => {\n    if (open && inputEl) {\n      inputEl.focus()\n      inputEl.select()\n    }\n  })\n\n  function handleKeydown(e: KeyboardEvent) {\n    if (e.key === 'Enter' && value.trim()) {\n      e.preventDefault()\n      onconfirm(value.trim())\n    }\n  }\n</script>\n\n<Modal {open} {title} onclose={oncancel}>\n  <div class=\"space-y-4\">\n    {#if description}\n      <p class=\"text-sm text-gray-600 dark:text-gray-300\">{description}</p>\n    {/if}\n\n    <input\n      bind:this={inputEl}\n      bind:value\n      {placeholder}\n      onkeydown={handleKeydown}\n      class=\"w-full rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-3 py-2 text-sm text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500 dark:focus:ring-orange-400\"\n      disabled={loading}\n    />\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <Button size=\"sm\" variant=\"secondary\" onclick={oncancel} disabled={loading}>{cancelLabel}</Button>\n      <Button size=\"sm\" variant=\"primary\" onclick={() => onconfirm(value.trim())} {loading} disabled={!value.trim()}>{confirmLabel}</Button>\n    </div>\n  </div>\n</Modal>\n"
  },
  {
    "path": "ui/src/lib/components/common/MiniTrendChart.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import uPlot from 'uplot'\n  import 'uplot/dist/uPlot.min.css'\n\n  interface Props {\n    x: number[]\n    y: number[]\n    color?: string\n    fill?: string\n    height?: number\n    strokeWidth?: number\n  }\n\n  let { x, y, color = '#f97316', fill = 'rgba(249, 115, 22, 0.16)', height = 120, strokeWidth = 2 }: Props = $props()\n\n  let container: HTMLDivElement\n  let chart: uPlot | null = null\n\n  function destroyChart() {\n    if (chart) {\n      chart.destroy()\n      chart = null\n    }\n  }\n\n  function draw() {\n    destroyChart()\n    if (!container) return\n    if (!x || !y || x.length === 0 || y.length === 0) return\n\n    const opts: uPlot.Options = {\n      width: container.clientWidth || 320,\n      height,\n      legend: { show: false },\n      cursor: { show: false },\n      axes: [\n        { show: false },\n        { show: false },\n      ],\n      scales: {\n        x: { time: false },\n      },\n      series: [\n        {},\n        {\n          stroke: color,\n          width: strokeWidth,\n          fill,\n          points: { show: false },\n        },\n      ],\n      padding: [4, 6, 4, 6],\n    }\n\n    chart = new uPlot(opts, [x, y], container)\n  }\n\n  onMount(() => {\n    draw()\n    const resize = () => draw()\n    window.addEventListener('resize', resize)\n    return () => {\n      window.removeEventListener('resize', resize)\n      destroyChart()\n    }\n  })\n\n  $effect(() => {\n    x\n    y\n    draw()\n  })\n</script>\n\n<div bind:this={container} class=\"w-full min-h-[100px]\"></div>\n"
  },
  {
    "path": "ui/src/lib/components/common/Modal.svelte",
    "content": "<script lang=\"ts\">\n  import type { Snippet } from 'svelte'\n  import { X } from 'lucide-svelte'\n\n  interface Props {\n    open: boolean\n    title?: string\n    onclose: () => void\n    children: Snippet\n  }\n\n  let { open, title = '', onclose, children }: Props = $props()\n\n  function handleKeydown(e: KeyboardEvent) {\n    if (e.key === 'Escape') onclose()\n  }\n</script>\n\n<svelte:window onkeydown={handleKeydown} />\n\n{#if open}\n  <!-- Backdrop -->\n  <div\n    class=\"fixed inset-0 z-[70] bg-black/60 backdrop-blur-sm\"\n    onclick={onclose}\n    role=\"presentation\"\n  ></div>\n\n  <!-- Dialog -->\n  <div class=\"fixed inset-0 z-[80] flex items-center justify-center p-4\">\n    <div\n      class=\"surface-card bg-gray-50/95 dark:bg-gray-900/95 backdrop-blur-xl rounded-xl max-w-lg w-full max-h-[80vh] overflow-auto\"\n      role=\"dialog\"\n      aria-modal=\"true\"\n      tabindex=\"0\"\n      onclick={(e: MouseEvent) => e.stopPropagation()}\n      onkeydown={(e: KeyboardEvent) => e.stopPropagation()}\n    >\n      {#if title}\n        <div class=\"flex items-center justify-between px-5 py-4 border-b border-gray-200 dark:border-gray-800\">\n          <h2 class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">{title}</h2>\n          <button class=\"text-gray-500 hover:text-gray-700 dark:hover:text-gray-300\" onclick={onclose}>\n            <X size={18} />\n          </button>\n        </div>\n      {/if}\n      <div class=\"p-5\">\n        {@render children()}\n      </div>\n    </div>\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/common/ProRequired.svelte",
    "content": "<script lang=\"ts\">\n  import { openSingletonTab } from '../../stores/tabs.svelte'\n  import {\n    ShieldAlert,\n    Settings,\n    ExternalLink,\n    CheckCircle2,\n    Lock,\n  } from 'lucide-svelte'\n  import logo from '../../../assets/logo.png'\n\n  interface Props {\n    feature: string\n  }\n\n  let { feature }: Props = $props()\n\n  function openSettings() {\n    openSingletonTab('settings', 'License')\n  }\n\n  const proFeatures = [\n    'Scheduled query jobs',\n    'Governance, Access and Policy controls',\n  ]\n</script>\n\n<div class=\"h-full overflow-auto p-6\">\n  <div class=\"mx-auto max-w-2xl\">\n    <div class=\"ds-panel overflow-hidden\">\n      <!-- Header -->\n      <div class=\"px-6 py-5 border-b border-orange-300/25 dark:border-orange-700/35 bg-gradient-to-r from-orange-100/60 via-transparent to-transparent dark:from-orange-500/10\">\n        <div class=\"flex flex-col gap-4 md:flex-row md:items-center md:justify-between\">\n          <div class=\"flex items-center gap-4 min-w-0\">\n            <div class=\"h-14 w-14 rounded-2xl border border-orange-300/40 dark:border-orange-700/40 bg-gray-100 dark:bg-gray-900 grid place-items-center overflow-hidden shrink-0\">\n              <img src={logo} alt=\"CH-UI logo\" class=\"h-11 w-11 object-contain\" />\n            </div>\n            <div>\n              <div class=\"flex items-center gap-2\">\n                <ShieldAlert size={18} class=\"text-ch-orange\" />\n                <h1 class=\"text-xl font-semibold text-gray-900 dark:text-gray-100\">Pro license required</h1>\n              </div>\n              <p class=\"text-sm text-gray-600 dark:text-gray-400 mt-1\">\n                <span class=\"text-ch-orange font-medium\">{feature}</span> is part of CH-UI Pro.\n              </p>\n            </div>\n          </div>\n        </div>\n      </div>\n\n      <!-- Content -->\n      <div class=\"p-6 space-y-6\">\n        <!-- Locked feature callout -->\n        <div class=\"flex 
items-center gap-3 rounded-lg border border-orange-300/30 dark:border-orange-700/30 bg-orange-50/50 dark:bg-orange-900/10 px-4 py-3\">\n          <Lock size={16} class=\"text-ch-orange shrink-0\" />\n          <p class=\"text-sm text-gray-700 dark:text-gray-300\">\n            Activate a Pro license in <strong>License</strong> to unlock this feature, or get a license if you don't have one yet.\n          </p>\n        </div>\n\n        <!-- What you unlock -->\n        <div>\n          <h3 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100 mb-3\">What you unlock with Pro</h3>\n          <ul class=\"space-y-2\">\n            {#each proFeatures as feat}\n              <li class=\"flex items-center gap-2 text-sm text-gray-600 dark:text-gray-400\">\n                <CheckCircle2 size={14} class=\"text-ch-orange shrink-0\" />\n                {feat}\n              </li>\n            {/each}\n          </ul>\n        </div>\n\n        <!-- Actions -->\n        <div class=\"flex flex-col sm:flex-row items-start sm:items-center gap-3 pt-2 border-t border-gray-200 dark:border-gray-800\">\n          <button class=\"ds-btn-primary px-4 py-2\" onclick={openSettings}>\n            <Settings size={14} />\n            Manage License\n          </button>\n\n          <a\n            class=\"inline-flex items-center gap-1.5 text-sm text-ch-orange hover:underline\"\n            href=\"https://ch-ui.com/pricing\"\n            target=\"_blank\"\n            rel=\"noreferrer\"\n          >\n            Get a License <ExternalLink size={12} />\n          </a>\n        </div>\n      </div>\n    </div>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/common/Sheet.svelte",
    "content": "<script lang=\"ts\">\n  import type { Snippet } from 'svelte'\n  import { fly, fade } from 'svelte/transition'\n  import { X } from 'lucide-svelte'\n\n  interface Props {\n    open: boolean\n    title?: string\n    size?: 'sm' | 'md' | 'lg' | 'xl'\n    onclose: () => void\n    children: Snippet\n  }\n\n  let { open, title = '', size = 'md', onclose, children }: Props = $props()\n\n  const sizeClasses: Record<string, string> = {\n    sm: 'max-w-md',\n    md: 'max-w-xl',\n    lg: 'max-w-3xl',\n    xl: 'max-w-6xl',\n  }\n\n  function handleKeydown(e: KeyboardEvent) {\n    if (e.key === 'Escape') onclose()\n  }\n</script>\n\n<svelte:window onkeydown={handleKeydown} />\n\n{#if open}\n  <!-- Backdrop -->\n  <div\n    class=\"fixed inset-0 z-40 bg-black/60 backdrop-blur-sm\"\n    onclick={onclose}\n    role=\"presentation\"\n    transition:fade={{ duration: 150 }}\n  ></div>\n\n  <!-- Sheet -->\n  <div\n    class=\"fixed inset-y-0 right-0 z-50 w-full {sizeClasses[size]} flex flex-col bg-gray-50/95 dark:bg-gray-900/95 backdrop-blur-xl border-l border-gray-200/80 dark:border-gray-800/80 shadow-2xl\"\n    role=\"dialog\"\n    aria-modal=\"true\"\n    transition:fly={{ x: 300, duration: 200 }}\n  >\n    {#if title}\n      <div class=\"flex items-center justify-between px-5 py-4 border-b border-gray-200 dark:border-gray-800 shrink-0\">\n        <h2 class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">{title}</h2>\n        <button class=\"text-gray-500 hover:text-gray-700 dark:hover:text-gray-300\" onclick={onclose}>\n          <X size={18} />\n        </button>\n      </div>\n    {/if}\n    <div class=\"flex-1 overflow-auto p-5\">\n      {@render children()}\n    </div>\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/common/Spinner.svelte",
    "content": "<script lang=\"ts\">\n  interface Props {\n    size?: 'sm' | 'md' | 'lg'\n    class?: string\n  }\n\n  let { size = 'md', class: cls = '' }: Props = $props()\n\n  const sizes: Record<string, string> = {\n    sm: 'h-4 w-4',\n    md: 'h-6 w-6',\n    lg: 'h-10 w-10',\n  }\n</script>\n\n<svg class=\"animate-spin {sizes[size]} {cls}\" viewBox=\"0 0 24 24\" fill=\"none\">\n  <circle class=\"opacity-25\" cx=\"12\" cy=\"12\" r=\"10\" stroke=\"currentColor\" stroke-width=\"4\" />\n  <path class=\"opacity-75\" fill=\"currentColor\" d=\"M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z\" />\n</svg>\n"
  },
  {
    "path": "ui/src/lib/components/common/Toast.svelte",
    "content": "<script lang=\"ts\">\n  import { Toaster } from 'svelte-sonner'\n</script>\n\n<Toaster\n  position=\"top-right\"\n  richColors={true}\n  closeButton={true}\n  visibleToasts={5}\n  expand={true}\n  duration={4400}\n  offset={16}\n  mobileOffset={12}\n  gap={10}\n  containerAriaLabel=\"CH-UI notifications\"\n/>\n\n<style>\n  :global([data-sonner-toaster]) {\n    --width: min(92vw, 26rem);\n  }\n\n  :global([data-sonner-toast][data-styled='true']) {\n    border-radius: 12px;\n    backdrop-filter: blur(8px);\n    box-shadow: 0 14px 30px rgba(0, 0, 0, 0.28);\n  }\n\n  :global([data-sonner-toast][data-styled='true'] [data-title]) {\n    font-size: 13px;\n    font-weight: 600;\n    line-height: 1.35;\n  }\n\n  :global([data-sonner-toast][data-styled='true'] [data-description]) {\n    font-size: 12px;\n    line-height: 1.4;\n    opacity: 0.9;\n  }\n\n  :global([data-sonner-toast][data-styled='true'] [data-close-button]) {\n    opacity: 0.72;\n  }\n\n  :global([data-sonner-toast][data-styled='true'] [data-close-button]:hover) {\n    opacity: 1;\n  }\n</style>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/ChartPanel.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount, onDestroy } from 'svelte'\n  import uPlot from 'uplot'\n  import 'uplot/dist/uPlot.min.css'\n  import type { PanelConfig } from '../../types/api'\n  import { toUPlotData, DEFAULT_COLORS, isDateType, isNumericType, type ColumnMeta } from '../../utils/chart-transform'\n  import { getTheme } from '../../stores/theme.svelte'\n\n  interface Props {\n    data: Record<string, unknown>[]\n    meta: ColumnMeta[]\n    config: PanelConfig\n  }\n\n  let { data, meta, config }: Props = $props()\n\n  let container: HTMLDivElement\n  let chart: uPlot | undefined\n  let themeObserver: MutationObserver | undefined\n  let measuredWidth = $state(0)\n  let measuredHeight = $state(0)\n\n  const plotData = $derived(toUPlotData(data, meta, config))\n\n  /** True when x column is categorical (String, not Date/Numeric) */\n  const isCategorical = $derived.by(() => {\n    const xMeta = meta.find(m => m.name === config.xColumn)\n    if (!xMeta) return false\n    return !isDateType(xMeta.type) && !isNumericType(xMeta.type)\n  })\n\n  /** Category labels for the x-axis (only populated when categorical) */\n  const xLabels = $derived(\n    isCategorical ? data.map(row => String(row[config.xColumn!] ?? '')) : []\n  )\n\n  function isDark(): boolean {\n    return getTheme() === 'dark'\n  }\n\n  function axisColor(): string {\n    return isDark() ? '#6b7280' : '#9ca3af'\n  }\n\n  function gridColor(): string {\n    return isDark() ? 
'rgba(75,85,99,0.3)' : 'rgba(209,213,219,0.5)'\n  }\n\n  function escapeHtml(s: string): string {\n    return s.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\"/g, '&quot;')\n  }\n\n  function tooltipPlugin(isTime: boolean, isCat: boolean, catLabels: string[]): uPlot.Plugin {\n    let tooltip: HTMLDivElement\n\n    function init(u: uPlot) {\n      tooltip = document.createElement('div')\n      Object.assign(tooltip.style, {\n        position: 'absolute',\n        display: 'none',\n        pointerEvents: 'none',\n        background: 'rgba(24,24,27,0.94)',\n        color: '#f3f4f6',\n        borderRadius: '6px',\n        padding: '8px 10px',\n        fontSize: '11px',\n        lineHeight: '1.5',\n        boxShadow: '0 4px 12px rgba(0,0,0,0.3)',\n        zIndex: '100',\n        whiteSpace: 'nowrap',\n        fontFamily: 'ui-monospace, monospace',\n      })\n      u.over.appendChild(tooltip)\n\n      u.over.addEventListener('mouseleave', () => { tooltip.style.display = 'none' })\n      u.over.addEventListener('mouseenter', () => { tooltip.style.display = 'block' })\n    }\n\n    function setCursor(u: uPlot) {\n      const { idx, left, top } = u.cursor\n      if (idx == null || left == null || top == null) {\n        tooltip.style.display = 'none'\n        return\n      }\n\n      const xVal = u.data[0][idx]\n      const header = isTime\n        ? new Date(xVal * 1000).toLocaleString()\n        : isCat\n          ? (catLabels[idx] ?? 
String(xVal))\n          : xVal.toLocaleString()\n\n      // Build tooltip using DOM methods to prevent XSS from series labels or values\n      tooltip.textContent = ''\n      const headerDiv = document.createElement('div')\n      Object.assign(headerDiv.style, { fontWeight: '600', marginBottom: '4px', color: '#e4e4e7' })\n      headerDiv.textContent = String(header)\n      tooltip.appendChild(headerDiv)\n\n      for (let i = 1; i < u.series.length; i++) {\n        const s = u.series[i]\n        if (!s.show) continue\n        const val = u.data[i][idx]\n        const display = val == null\n          ? '\\u2014'\n          : Number(val).toLocaleString(undefined, { maximumFractionDigits: 2 })\n        const color = typeof s.stroke === 'function' ? (s.stroke as Function)(u, i) : s.stroke\n\n        const row = document.createElement('div')\n        Object.assign(row.style, { display: 'flex', alignItems: 'center', gap: '6px' })\n\n        const dot = document.createElement('span')\n        Object.assign(dot.style, { width: '8px', height: '8px', borderRadius: '50%', background: String(color ?? ''), flexShrink: '0' })\n        row.appendChild(dot)\n\n        const label = document.createElement('span')\n        Object.assign(label.style, { color: '#a1a1aa', flex: '1' })\n        label.textContent = String(s.label ?? 
'')\n        row.appendChild(label)\n\n        const value = document.createElement('span')\n        Object.assign(value.style, { fontWeight: '600', marginLeft: '12px' })\n        value.textContent = display\n        row.appendChild(value)\n\n        tooltip.appendChild(row)\n      }\n\n      const ow = u.over.clientWidth\n      const tw = tooltip.offsetWidth\n      const th = tooltip.offsetHeight\n      const pad = 10\n\n      let x = left + pad\n      let y = top - th - pad\n\n      if (x + tw > ow) x = left - tw - pad\n      if (y < 0) y = top + pad\n\n      tooltip.style.left = x + 'px'\n      tooltip.style.top = y + 'px'\n      tooltip.style.display = 'block'\n    }\n\n    return { hooks: { init, setCursor } }\n  }\n\n  function buildOpts(w: number, h: number): uPlot.Options {\n    const xMeta = meta.find(m => m.name === config.xColumn)\n    const isTime = xMeta ? isDateType(xMeta.type) : false\n    const isCat = isCategorical\n    const catLabels = xLabels\n    const colors = config.colors?.length ? config.colors : DEFAULT_COLORS\n\n    const series: uPlot.Series[] = [\n      { label: config.xColumn ?? 'X' },\n    ]\n\n    const yColumns = config.yColumns ?? 
[]\n    for (let i = 0; i < yColumns.length; i++) {\n      const color = colors[i % colors.length]\n      const s: uPlot.Series = {\n        label: yColumns[i],\n        stroke: color,\n        width: 2,\n      }\n\n      if (config.chartType === 'bar') {\n        s.fill = color + '80'\n        s.paths = uPlot.paths.bars!({ size: [0.6, 100] })\n      } else {\n        s.fill = color + '1A'\n      }\n\n      series.push(s)\n    }\n\n    // Build x-axis config based on data type\n    let xAxisConfig: Record<string, any> = {}\n    if (isTime) {\n      // default time formatting\n    } else if (isCat) {\n      // Categorical: show category labels at exact index positions\n      xAxisConfig = {\n        splits: (_u: uPlot) => catLabels.map((_: string, i: number) => i),\n        values: (_u: uPlot, splits: number[]) => splits.map(i => catLabels[i] ?? ''),\n        gap: 8,\n        size: catLabels.length > 6 ? 60 : 40,\n        rotate: catLabels.length > 6 ? -45 : 0,\n      }\n    } else {\n      xAxisConfig = {\n        values: (_u: uPlot, vals: number[]) => vals.map(v => String(v)),\n      }\n    }\n\n    return {\n      width: w,\n      height: h,\n      series,\n      plugins: [tooltipPlugin(isTime, isCat, catLabels)],\n      axes: [\n        {\n          stroke: axisColor(),\n          grid: { stroke: gridColor(), width: 1 },\n          ticks: { stroke: gridColor(), width: 1 },\n          ...xAxisConfig,\n        },\n        {\n          stroke: axisColor(),\n          grid: { stroke: gridColor(), width: 1 },\n          ticks: { stroke: gridColor(), width: 1 },\n        },\n      ],\n      scales: {\n        x: isTime\n          ? { time: true }\n          : isCat\n            ? 
{ time: false, distr: 2 }\n            : { time: false },\n      },\n      legend: { show: false },\n      cursor: { drag: { x: true, y: false } },\n    }\n  }\n\n  function createChart() {\n    if (chart) {\n      chart.destroy()\n      chart = undefined\n    }\n    if (!container || !plotData[0]?.length || measuredWidth <= 0 || measuredHeight <= 0) return\n\n    const opts = buildOpts(measuredWidth, measuredHeight)\n    chart = new uPlot(opts, plotData, container)\n  }\n\n  // ResizeObserver — measure container, use setSize for efficient resize\n  $effect(() => {\n    if (!container) return\n    const ro = new ResizeObserver(entries => {\n      const { width: w, height: h } = entries[0].contentRect\n      const fw = Math.floor(w)\n      const fh = Math.floor(h)\n      if (fw > 0 && fh > 0 && (fw !== measuredWidth || fh !== measuredHeight)) {\n        measuredWidth = fw\n        measuredHeight = fh\n      }\n    })\n    ro.observe(container)\n    return () => ro.disconnect()\n  })\n\n  // Resize chart efficiently when container dimensions change\n  $effect(() => {\n    if (!chart || measuredWidth <= 0 || measuredHeight <= 0) return\n    chart.setSize({ width: measuredWidth, height: measuredHeight })\n  })\n\n  // Recreate chart when data/config changes (not dimensions)\n  $effect(() => {\n    void plotData\n    void config.chartType\n    void config.xColumn\n    void config.yColumns\n    void config.colors\n\n    if (container && measuredWidth > 0 && measuredHeight > 0) {\n      createChart()\n    }\n  })\n\n  onMount(() => {\n    themeObserver = new MutationObserver(() => {\n      createChart()\n    })\n    themeObserver.observe(document.documentElement, { attributes: true, attributeFilter: ['class'] })\n  })\n\n  onDestroy(() => {\n    themeObserver?.disconnect()\n    chart?.destroy()\n  })\n</script>\n\n<div bind:this={container} class=\"w-full h-full\"></div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/DashboardGrid.svelte",
    "content": "<script lang=\"ts\">\n  import type { Panel, PanelConfig } from '../../types/api'\n  import { apiPut } from '../../api/client'\n  import { error as toastError } from '../../stores/toast.svelte'\n  import Spinner from '../common/Spinner.svelte'\n  import ChartPanel from './ChartPanel.svelte'\n  import { getStatValue } from '../../utils/chart-transform'\n  import {\n    COLS, ROW_H, GAP, MIN_W, MIN_H,\n    calcColW, gridToPixel, compact, containerHeight,\n    type LayoutItem,\n  } from '../../utils/grid-layout'\n  import { Pencil, Trash2, GripVertical } from 'lucide-svelte'\n\n  interface Props {\n    dashboardId: string\n    panels: Panel[]\n    panelResults: Map<string, { data: any[]; meta: any[]; error?: string; loading: boolean }>\n    onpanelschange: (panels: Panel[]) => void\n    oneditpanel: (panel: Panel) => void\n    ondeletepanel: (panelId: string) => void\n  }\n\n  let { dashboardId, panels, panelResults, onpanelschange, oneditpanel, ondeletepanel }: Props = $props()\n\n  let gridEl = $state<HTMLDivElement>(undefined!)\n  let containerWidth = $state(0)\n  const colW = $derived(calcColW(containerWidth))\n\n  // Drag/resize state\n  type Mode = 'idle' | 'dragging' | 'resizing'\n  let mode = $state<Mode>('idle')\n  let activeId = $state<string | null>(null)\n  let startPointer = $state({ x: 0, y: 0 })\n  let startLayout = $state({ x: 0, y: 0, w: 0, h: 0 })\n  let ghostLayout = $state<{ x: number; y: number; w: number; h: number } | null>(null)\n  // Store the scroll position at drag start so we account for scrolling during drag\n  let startScrollTop = $state(0)\n\n  function panelToLayout(p: Panel): LayoutItem {\n    return { id: p.id, x: p.layout_x, y: p.layout_y, w: p.layout_w, h: p.layout_h }\n  }\n\n  // Compacted display layout\n  const displayLayouts = $derived.by(() => {\n    const items = panels.map(p => {\n      if (p.id === activeId && ghostLayout) {\n        return { id: p.id, ...ghostLayout }\n      }\n      return 
panelToLayout(p)\n    })\n    return compact(items, activeId ?? undefined)\n  })\n\n  const totalHeight = $derived(containerHeight(displayLayouts))\n\n  // Measure container width\n  $effect(() => {\n    if (!gridEl) return\n    const ro = new ResizeObserver(entries => {\n      containerWidth = entries[0].contentRect.width\n    })\n    ro.observe(gridEl)\n    return () => ro.disconnect()\n  })\n\n  function parsePanelConfig(configStr: string): PanelConfig {\n    try {\n      return JSON.parse(configStr || '{}')\n    } catch {\n      return { chartType: 'table' }\n    }\n  }\n\n  // --- Drag handlers ---\n\n  function handleDragStart(panelId: string, e: PointerEvent) {\n    if (e.button !== 0) return\n    const panel = panels.find(p => p.id === panelId)\n    if (!panel) return\n    e.preventDefault()\n    mode = 'dragging'\n    activeId = panelId\n    startPointer = { x: e.clientX, y: e.clientY }\n    startLayout = { x: panel.layout_x, y: panel.layout_y, w: panel.layout_w, h: panel.layout_h }\n    ghostLayout = { ...startLayout }\n    startScrollTop = gridEl.parentElement?.scrollTop ?? 0\n    gridEl.setPointerCapture(e.pointerId)\n  }\n\n  function handleResizeStart(panelId: string, e: PointerEvent) {\n    if (e.button !== 0) return\n    const panel = panels.find(p => p.id === panelId)\n    if (!panel) return\n    e.preventDefault()\n    e.stopPropagation()\n    mode = 'resizing'\n    activeId = panelId\n    startPointer = { x: e.clientX, y: e.clientY }\n    startLayout = { x: panel.layout_x, y: panel.layout_y, w: panel.layout_w, h: panel.layout_h }\n    ghostLayout = { ...startLayout }\n    startScrollTop = gridEl.parentElement?.scrollTop ?? 0\n    gridEl.setPointerCapture(e.pointerId)\n  }\n\n  function handlePointerMove(e: PointerEvent) {\n    if (mode === 'idle' || !ghostLayout) return\n\n    const dx = e.clientX - startPointer.x\n    const dy = e.clientY - startPointer.y + ((gridEl.parentElement?.scrollTop ?? 
0) - startScrollTop)\n    const cellW = colW + GAP\n    const cellH = ROW_H + GAP\n\n    if (mode === 'dragging') {\n      const gridDx = Math.round(dx / cellW)\n      const gridDy = Math.round(dy / cellH)\n      ghostLayout = {\n        x: Math.max(0, Math.min(COLS - startLayout.w, startLayout.x + gridDx)),\n        y: Math.max(0, startLayout.y + gridDy),\n        w: startLayout.w,\n        h: startLayout.h,\n      }\n    } else if (mode === 'resizing') {\n      const gridDw = Math.round(dx / cellW)\n      const gridDh = Math.round(dy / cellH)\n      ghostLayout = {\n        x: startLayout.x,\n        y: startLayout.y,\n        w: Math.max(MIN_W, Math.min(COLS - startLayout.x, startLayout.w + gridDw)),\n        h: Math.max(MIN_H, startLayout.h + gridDh),\n      }\n    }\n\n    // Auto-scroll when near edges\n    const scrollParent = gridEl.parentElement\n    if (scrollParent) {\n      const rect = scrollParent.getBoundingClientRect()\n      const edge = 60\n      if (e.clientY > rect.bottom - edge) {\n        scrollParent.scrollTop += 8\n      } else if (e.clientY < rect.top + edge) {\n        scrollParent.scrollTop -= 8\n      }\n    }\n  }\n\n  function handlePointerUp(e: PointerEvent) {\n    if (mode === 'idle' || !activeId) return\n    gridEl.releasePointerCapture(e.pointerId)\n\n    // Apply final compacted layout\n    const finalLayouts = displayLayouts\n    const originalPanels = panels\n    const updatedPanels = panels.map(p => {\n      const layout = finalLayouts.find(l => l.id === p.id)\n      if (!layout) return p\n      if (p.layout_x !== layout.x || p.layout_y !== layout.y ||\n          p.layout_w !== layout.w || p.layout_h !== layout.h) {\n        return { ...p, layout_x: layout.x, layout_y: layout.y, layout_w: layout.w, layout_h: layout.h }\n      }\n      return p\n    })\n\n    onpanelschange(updatedPanels)\n    persistLayouts(updatedPanels, originalPanels)\n\n    mode = 'idle'\n    activeId = null\n    ghostLayout = null\n  }\n\n  async function 
persistLayouts(updated: Panel[], original: Panel[]) {\n    const changed = updated.filter(p => {\n      const orig = original.find(o => o.id === p.id)\n      if (!orig) return false\n      return orig.layout_x !== p.layout_x || orig.layout_y !== p.layout_y ||\n             orig.layout_w !== p.layout_w || orig.layout_h !== p.layout_h\n    })\n    if (changed.length === 0) return\n\n    try {\n      await Promise.all(changed.map(p =>\n        apiPut(`/api/dashboards/${dashboardId}/panels/${p.id}`, {\n          layout_x: p.layout_x,\n          layout_y: p.layout_y,\n          layout_w: p.layout_w,\n          layout_h: p.layout_h,\n        })\n      ))\n    } catch (e: any) {\n      toastError('Failed to save layout: ' + e.message)\n    }\n  }\n</script>\n\n{#if panels.length === 0}\n  <div class=\"text-center py-12 text-gray-500\">\n    <p class=\"mb-1\">No panels yet</p>\n    <p class=\"text-xs text-gray-400 dark:text-gray-600\">Add a panel with a SQL query to visualize data</p>\n  </div>\n{:else}\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div\n    bind:this={gridEl}\n    class=\"relative w-full\"\n    style=\"min-height: {totalHeight}px;\"\n    onpointermove={handlePointerMove}\n    onpointerup={handlePointerUp}\n  >\n    {#each panels as panel (panel.id)}\n      {@const layout = displayLayouts.find(l => l.id === panel.id)}\n      {@const pos = layout && colW > 0 ? gridToPixel(layout, colW) : null}\n      {@const result = panelResults.get(panel.id)}\n      {@const cfg = parsePanelConfig(panel.config)}\n      {@const isActive = (mode === 'dragging' || mode === 'resizing') && activeId === panel.id}\n      {#if pos}\n        <div\n          class=\"absolute flex flex-col bg-gray-50 dark:bg-gray-900 border rounded-lg overflow-hidden group\n            {isActive ? 
'border-ch-blue ring-2 ring-ch-blue/30 z-20' : 'border-gray-200 dark:border-gray-800'}\"\n          style=\"left:{pos.left}px; top:{pos.top}px; width:{pos.width}px; height:{pos.height}px;\n            {isActive ? '' : 'transition: left 0.15s ease, top 0.15s ease, width 0.15s ease, height 0.15s ease;'}\"\n        >\n          <!-- Panel header — drag handle -->\n          <!-- svelte-ignore a11y_no_static_element_interactions -->\n          <div\n            class=\"flex items-center justify-between px-3 py-2 border-b border-gray-200 dark:border-gray-800 bg-gray-100/50 dark:bg-gray-800/50 cursor-grab active:cursor-grabbing select-none shrink-0\"\n            onpointerdown={(e) => handleDragStart(panel.id, e)}\n          >\n            <div class=\"flex items-center gap-2 min-w-0\">\n              <GripVertical size={12} class=\"text-gray-400 shrink-0 opacity-0 group-hover:opacity-100 transition-opacity\" />\n              <span class=\"text-xs font-medium text-gray-700 dark:text-gray-300 truncate\">{panel.name}</span>\n\n            </div>\n            <div class=\"flex items-center gap-1 opacity-0 group-hover:opacity-100 transition-opacity\">\n              <button\n                class=\"p-1 rounded text-gray-400 hover:text-ch-blue hover:bg-gray-200 dark:hover:bg-gray-700\"\n                onpointerdown={(e) => e.stopPropagation()}\n                onclick={() => oneditpanel(panel)}\n                title=\"Edit\"\n              >\n                <Pencil size={12} />\n              </button>\n              <button\n                class=\"p-1 rounded text-gray-400 hover:text-red-400 hover:bg-gray-200 dark:hover:bg-gray-700\"\n                onpointerdown={(e) => e.stopPropagation()}\n                onclick={() => ondeletepanel(panel.id)}\n                title=\"Delete\"\n              >\n                <Trash2 size={12} />\n              </button>\n            </div>\n          </div>\n\n          <!-- Panel content -->\n          <div class=\"flex-1 
overflow-hidden p-2\">\n            {#if !result || result.loading}\n              <div class=\"flex items-center justify-center h-full\"><Spinner size=\"sm\" /></div>\n            {:else if result.error}\n              <p class=\"text-xs text-red-500 p-2\">{result.error}</p>\n            {:else if panel.panel_type === 'stat'}\n              <div class=\"flex items-center justify-center h-full\">\n                <span class=\"text-3xl font-bold text-gray-900 dark:text-gray-100\">{getStatValue(result.data, result.meta)}</span>\n              </div>\n            {:else if panel.panel_type === 'timeseries' || panel.panel_type === 'bar'}\n              <ChartPanel data={result.data} meta={result.meta} config={cfg} />\n            {:else}\n              <!-- Table -->\n              {#if result.meta.length > 0}\n                <div class=\"overflow-auto h-full\">\n                  <table class=\"w-full text-xs\">\n                    <thead>\n                      <tr class=\"border-b border-gray-200 dark:border-gray-800\">\n                        {#each result.meta as col}\n                          <th class=\"text-left py-1 px-2 text-gray-500 font-medium whitespace-nowrap\">{col.name}</th>\n                        {/each}\n                      </tr>\n                    </thead>\n                    <tbody>\n                      {#each result.data.slice(0, 100) as row}\n                        <tr class=\"border-b border-gray-100 dark:border-gray-900\">\n                          {#each result.meta as col}\n                            <td class=\"py-1 px-2 text-gray-700 dark:text-gray-300 whitespace-nowrap\">{row[col.name] ?? 
'--'}</td>\n                          {/each}\n                        </tr>\n                      {/each}\n                    </tbody>\n                  </table>\n                </div>\n              {:else}\n                <p class=\"text-xs text-gray-500 p-2\">No data</p>\n              {/if}\n            {/if}\n          </div>\n\n          <!-- Resize handle -->\n          <!-- svelte-ignore a11y_no_static_element_interactions -->\n          <div\n            class=\"absolute bottom-0 right-0 w-5 h-5 cursor-se-resize opacity-0 group-hover:opacity-100 transition-opacity flex items-end justify-end p-0.5\"\n            onpointerdown={(e) => handleResizeStart(panel.id, e)}\n          >\n            <svg viewBox=\"0 0 6 6\" class=\"w-3 h-3 text-gray-400\">\n              <circle cx=\"5\" cy=\"1\" r=\"0.7\" fill=\"currentColor\" />\n              <circle cx=\"5\" cy=\"5\" r=\"0.7\" fill=\"currentColor\" />\n              <circle cx=\"1\" cy=\"5\" r=\"0.7\" fill=\"currentColor\" />\n            </svg>\n          </div>\n        </div>\n      {/if}\n    {/each}\n\n    <!-- Ghost placeholder during drag/resize -->\n    {#if ghostLayout && mode !== 'idle' && colW > 0}\n      {@const ghostPos = gridToPixel(ghostLayout, colW)}\n      <div\n        class=\"absolute rounded-lg border-2 border-dashed border-ch-blue/60 bg-ch-blue/12 pointer-events-none z-10\"\n        style=\"left:{ghostPos.left}px; top:{ghostPos.top}px; width:{ghostPos.width}px; height:{ghostPos.height}px;\n          transition: left 0.1s ease, top 0.1s ease, width 0.1s ease, height 0.1s ease;\"\n      ></div>\n    {/if}\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/PanelEditor.svelte",
    "content": "<script lang=\"ts\">\n  import type { Panel, PanelConfig } from '../../types/api'\n  import type { ColumnMeta } from '../../types/query'\n  import { apiPost, apiPut } from '../../api/client'\n  import { formatSQL } from '../../api/query'\n  import { success as toastSuccess, error as toastError } from '../../stores/toast.svelte'\n  import Button from '../common/Button.svelte'\n  import Combobox from '../common/Combobox.svelte'\n  import type { ComboboxOption } from '../common/Combobox.svelte'\n  import Spinner from '../common/Spinner.svelte'\n  import Toolbar from '../editor/Toolbar.svelte'\n  import SqlEditor from '../editor/SqlEditor.svelte'\n  import VirtualTable from '../table/VirtualTable.svelte'\n  import ChartPanel from './ChartPanel.svelte'\n  import { isDateType, isNumericType, getStatValue, DEFAULT_COLORS } from '../../utils/chart-transform'\n  import { formatDashboardTimeRangeLabel } from '../../utils/dashboard-time'\n  import { toDashboardTimeRangePayload } from '../../utils/dashboard-time'\n  import { Table2, Hash, TrendingUp, BarChart3, PanelsTopLeft } from 'lucide-svelte'\n\n  interface Props {\n    dashboardId: string\n    dashboardTimeRange?: string\n    panel?: Panel | null\n    onclose: () => void\n    onsave: (panel: Panel) => void\n  }\n\n  let { dashboardId, dashboardTimeRange = '1h', panel = null, onclose, onsave }: Props = $props()\n\n  // Form state\n  let name = $state('')\n  let query = $state('')\n  let saving = $state(false)\n  let running = $state(false)\n  let formatting = $state(false)\n\n  let chartType = $state<PanelConfig['chartType']>('table')\n  let xColumn = $state('')\n  let yColumns = $state<string[]>([])\n  let colors = $state<string[]>([...DEFAULT_COLORS])\n  let legendPosition = $state<'bottom' | 'right' | 'none'>('bottom')\n\n  // Query result state\n  let queryData = $state<Record<string, unknown>[]>([])\n  let queryMeta = $state<ColumnMeta[]>([])\n  let queryError = $state<string | null>(null)\n\n  let 
editorComponent: SqlEditor | undefined = $state()\n\n  // Derived: VirtualTable needs positional arrays\n  const vtData = $derived(queryData.map(row => queryMeta.map(col => row[col.name])))\n  const vtMeta = $derived(queryMeta.map(m => ({ name: m.name, type: m.type })))\n\n  // Derived: Detect columns by type\n  const dateColumns = $derived(queryMeta.filter(m => isDateType(m.type)))\n  const numericColumns = $derived(queryMeta.filter(m => isNumericType(m.type)))\n\n  // Current config for chart preview\n  const currentConfig = $derived<PanelConfig>({\n    chartType,\n    xColumn,\n    yColumns,\n    colors,\n    legendPosition,\n  })\n\n  const dashboardRangeLabel = $derived(formatDashboardTimeRangeLabel(dashboardTimeRange))\n  const xAxisOptions = $derived.by<ComboboxOption[]>(() => [\n    { value: '', label: 'Select column...' },\n    ...queryMeta.map(col => ({ value: col.name, label: `${col.name}`, hint: col.type, keywords: `${col.name} ${col.type}` })),\n  ])\n  const legendOptions: ComboboxOption[] = [\n    { value: 'bottom', label: 'Bottom' },\n    { value: 'right', label: 'Right' },\n    { value: 'none', label: 'Hidden' },\n  ]\n\n  function parsePanelConfig(value: Panel | null): Partial<PanelConfig> {\n    if (!value?.config) return {}\n    try {\n      return JSON.parse(value.config) as Partial<PanelConfig>\n    } catch {\n      return {}\n    }\n  }\n\n  $effect(() => {\n    const currentPanel = panel\n    const existingConfig = parsePanelConfig(currentPanel)\n\n    name = currentPanel?.name ?? ''\n    query = currentPanel?.query ?? ''\n    chartType = existingConfig.chartType ?? (currentPanel?.panel_type as PanelConfig['chartType']) ?? 'table'\n    xColumn = existingConfig.xColumn ?? ''\n    yColumns = existingConfig.yColumns ?? []\n    colors = existingConfig.colors ?? [...DEFAULT_COLORS]\n    legendPosition = existingConfig.legendPosition ?? 
'bottom'\n    queryData = []\n    queryMeta = []\n    queryError = null\n  })\n\n  // Sync panel query into the CodeMirror editor after it mounts\n  $effect(() => {\n    const q = panel?.query ?? ''\n    if (editorComponent && q) {\n      const current = editorComponent.getValue()\n      if (current !== q) {\n        editorComponent.setValue(q)\n      }\n    }\n  })\n\n  // Auto-detect axes when results arrive\n  $effect(() => {\n    if (queryMeta.length > 0 && !xColumn && !yColumns.length) {\n      // Auto-pick first date column for X, or first column\n      const firstDate = dateColumns[0]\n      if (firstDate) {\n        xColumn = firstDate.name\n      } else if (queryMeta.length > 0) {\n        xColumn = queryMeta[0].name\n      }\n      // Auto-pick first numeric columns for Y\n      const autoY = numericColumns.filter(m => m.name !== xColumn).slice(0, 3)\n      if (autoY.length > 0) {\n        yColumns = autoY.map(m => m.name)\n      }\n    }\n  })\n\n\n  async function runQuery() {\n    const sql = editorComponent?.getValue() ?? query\n    if (!sql.trim()) {\n      queryError = 'Enter a query first'\n      return\n    }\n    running = true\n    queryError = null\n    queryData = []\n    queryMeta = []\n    try {\n      const res = await apiPost<{ data: any[]; meta: any[]; error?: string; success?: boolean }>('/api/dashboards/query', {\n        query: sql.trim(),\n        time_range: toDashboardTimeRangePayload(dashboardTimeRange || '1h'),\n      })\n      if (res.success === false) {\n        queryError = res.error ?? 'Query failed'\n      } else {\n        queryData = res.data ?? []\n        queryMeta = res.meta ?? []\n      }\n    } catch (e: any) {\n      queryError = e.message\n    } finally {\n      running = false\n    }\n  }\n\n  async function handleFormat() {\n    const sql = editorComponent?.getValue() ?? 
query\n    if (!sql.trim()) return\n    formatting = true\n    try {\n      const formatted = await formatSQL(sql)\n      editorComponent?.setValue(formatted)\n      query = formatted\n      toastSuccess('Query formatted')\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      formatting = false\n    }\n  }\n\n  function toggleYColumn(colName: string) {\n    if (yColumns.includes(colName)) {\n      yColumns = yColumns.filter(c => c !== colName)\n    } else {\n      yColumns = [...yColumns, colName]\n    }\n  }\n\n  function updateColor(index: number, color: string) {\n    const next = [...colors]\n    next[index] = color\n    colors = next\n  }\n\n  async function handleSave() {\n    const sql = editorComponent?.getValue() ?? query\n    if (!name.trim() || !sql.trim()) {\n      toastError('Name and query are required')\n      return\n    }\n    saving = true\n    const configJson = JSON.stringify(currentConfig)\n    try {\n      if (panel?.id) {\n        const res = await apiPut<{ panel: Panel }>(`/api/dashboards/${dashboardId}/panels/${panel.id}`, {\n          name: name.trim(),\n          panel_type: chartType,\n          query: sql.trim(),\n          config: configJson,\n        })\n        if (res.panel) onsave(res.panel)\n      } else {\n        const res = await apiPost<{ panel: Panel }>(`/api/dashboards/${dashboardId}/panels`, {\n          name: name.trim(),\n          panel_type: chartType,\n          query: sql.trim(),\n          config: configJson,\n          layout_x: 0,\n          layout_y: 0,\n          layout_w: 6,\n          layout_h: 4,\n        })\n        if (res.panel) onsave(res.panel)\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      saving = false\n    }\n  }\n\n  const vizTypes: { type: PanelConfig['chartType']; label: string; icon: typeof Table2 }[] = [\n    { type: 'table', label: 'Table', icon: Table2 },\n    { type: 'stat', label: 'Stat', icon: Hash },\n    { type: 'timeseries', 
label: 'Time Series', icon: TrendingUp },\n    { type: 'bar', label: 'Bar', icon: BarChart3 },\n  ]\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <div class=\"flex items-center justify-between px-4 py-3 border-b border-gray-200 dark:border-gray-800\">\n    <div class=\"flex items-center gap-2\">\n      <PanelsTopLeft size={16} class=\"text-ch-orange\" />\n      <h2 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100\">\n        {panel?.id ? 'Edit Panel' : 'New Panel'}\n      </h2>\n    </div>\n    <div class=\"flex items-center gap-2\">\n      <Button variant=\"secondary\" size=\"sm\" onclick={onclose}>Cancel</Button>\n      <Button size=\"sm\" loading={saving} onclick={handleSave}>\n        {panel?.id ? 'Update Panel' : 'Create Panel'}\n      </Button>\n    </div>\n  </div>\n\n  <!-- Editor: left/right split -->\n  <div class=\"flex flex-1 min-h-0 overflow-hidden\">\n    <!-- Left side: Query workspace -->\n    <div class=\"flex-[3] flex flex-col border-r border-gray-200 dark:border-gray-800 min-w-0\">\n      <Toolbar running={running} onrun={runQuery} onformat={handleFormat} onsave={handleSave} />\n\n      <!-- SQL Editor -->\n      <div class=\"h-[42%] min-h-[200px] shrink-0 border-b border-gray-200 dark:border-gray-800\">\n        <SqlEditor\n          bind:this={editorComponent}\n          value={query}\n          onrun={runQuery}\n          onchange={(v) => query = v}\n        />\n      </div>\n\n      <!-- Result area -->\n      <div class=\"flex-1 min-h-0 overflow-auto\">\n        {#if running}\n          <div class=\"flex items-center justify-center h-full\"><Spinner /></div>\n        {:else if queryError}\n          <div class=\"p-4\">\n            <p class=\"text-sm text-red-500 bg-red-50 dark:bg-red-900/20 rounded p-3\">{queryError}</p>\n          </div>\n        {:else if queryData.length === 0 && queryMeta.length === 0}\n          <div class=\"flex items-center justify-center h-full text-gray-400 text-sm\">\n            Run a query 
to see results\n          </div>\n        {:else if chartType === 'table'}\n          <VirtualTable meta={vtMeta} data={vtData} />\n        {:else if chartType === 'stat'}\n          <div class=\"flex items-center justify-center h-full\">\n            <span class=\"text-5xl font-bold text-gray-900 dark:text-gray-100\">\n              {getStatValue(queryData, queryMeta)}\n            </span>\n          </div>\n        {:else}\n          <ChartPanel\n            data={queryData}\n            meta={queryMeta}\n            config={currentConfig}\n          />\n        {/if}\n      </div>\n    </div>\n\n    <!-- Right side: Configuration -->\n    <div class=\"flex-[2] flex flex-col min-w-0 overflow-y-auto\">\n      <div class=\"p-4 flex flex-col gap-4\">\n        <!-- Panel name -->\n        <div>\n          <label for=\"panel-name\" class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">Panel Name</label>\n          <input\n            id=\"panel-name\"\n            type=\"text\"\n            class=\"w-full text-sm bg-transparent border border-gray-300 dark:border-gray-700 rounded px-3 py-2 text-gray-800 dark:text-gray-200\"\n            placeholder=\"My Panel\"\n            bind:value={name}\n          />\n        </div>\n\n        <!-- Available variables -->\n        <details class=\"group/vars\">\n          <summary class=\"flex items-center gap-1.5 text-xs font-medium text-gray-500 dark:text-gray-400 cursor-pointer select-none hover:text-gray-700 dark:hover:text-gray-300 list-none [&::-webkit-details-marker]:hidden\">\n            <span class=\"text-[10px] transition-transform group-open/vars:rotate-90\">&#9654;</span>\n            Available Variables\n          </summary>\n          <div class=\"mt-2 space-y-1.5 text-[11px] text-gray-500 dark:text-gray-400\">\n            <div><code class=\"px-1 py-0.5 rounded bg-gray-200 dark:bg-gray-800 text-gray-600 dark:text-gray-300\">$__timestamp(col)</code> — DateTime range filter</div>\n            
<div><code class=\"px-1 py-0.5 rounded bg-gray-200 dark:bg-gray-800 text-gray-600 dark:text-gray-300\">$__timeFilter(col)</code> — Epoch range filter</div>\n            <div><code class=\"px-1 py-0.5 rounded bg-gray-200 dark:bg-gray-800 text-gray-600 dark:text-gray-300\">$__interval</code> — Aggregation interval (seconds)</div>\n            <div><code class=\"px-1 py-0.5 rounded bg-gray-200 dark:bg-gray-800 text-gray-600 dark:text-gray-300\">$__timeFrom</code> / <code class=\"px-1 py-0.5 rounded bg-gray-200 dark:bg-gray-800 text-gray-600 dark:text-gray-300\">$__timeTo</code> — Range boundaries</div>\n          </div>\n        </details>\n\n        <!-- Visualization type -->\n        <div>\n          <p class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-2\">Visualization</p>\n          <div class=\"grid grid-cols-4 gap-1.5\">\n            {#each vizTypes as vt}\n              {@const Icon = vt.icon}\n              <button\n                class=\"flex flex-col items-center gap-1 py-2 px-1 rounded-md border text-xs transition-colors\n                  {chartType === vt.type\n                    ? 
'border-ch-blue bg-orange-50 dark:bg-orange-900/20 text-ch-blue'\n                    : 'border-gray-200 dark:border-gray-700 text-gray-500 hover:border-gray-300 dark:hover:border-gray-600 hover:text-gray-700 dark:hover:text-gray-300'}\"\n                onclick={() => chartType = vt.type}\n              >\n                <Icon size={16} />\n                {vt.label}\n              </button>\n            {/each}\n          </div>\n        </div>\n\n        <!-- Chart config (only for timeseries/bar) -->\n        {#if chartType === 'timeseries' || chartType === 'bar'}\n          <div class=\"rounded-lg border border-gray-200 dark:border-gray-800 bg-gray-100/60 dark:bg-gray-900/60 px-2.5 py-2\">\n            <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Time Scope</p>\n            <p class=\"mt-0.5 text-xs text-gray-700 dark:text-gray-300\">\n              Uses dashboard picker: <span class=\"text-ch-orange\">{dashboardRangeLabel}</span>\n            </p>\n          </div>\n\n          <!-- X-Axis column -->\n          <div>\n            <label for=\"x-axis-column\" class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">X-Axis Column</label>\n            <Combobox\n              options={xAxisOptions}\n              value={xColumn}\n              onChange={(v) => xColumn = v}\n              placeholder=\"Select column...\"\n            />\n          </div>\n\n          <!-- Y-Axis columns -->\n          <div>\n            <p class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">Y-Axis Columns</p>\n            {#if queryMeta.length === 0}\n              <p class=\"text-xs text-gray-400\">Run a query first</p>\n            {:else}\n              <div class=\"flex flex-col gap-1 max-h-40 overflow-y-auto\">\n                {#each queryMeta.filter(m => m.name !== xColumn) as col}\n                  <label class=\"flex items-center gap-2 text-xs text-gray-700 dark:text-gray-300 cursor-pointer 
hover:bg-gray-100 dark:hover:bg-gray-800 rounded px-2 py-1\">\n                    <input\n                      type=\"checkbox\"\n                      class=\"ds-checkbox ds-checkbox-sm\"\n                      checked={yColumns.includes(col.name)}\n                      onchange={() => toggleYColumn(col.name)}\n                    />\n                    <span class=\"truncate\">{col.name}</span>\n                    <span class=\"text-gray-400 ml-auto shrink-0\">{col.type}</span>\n                  </label>\n                {/each}\n              </div>\n            {/if}\n          </div>\n\n          <!-- Series colors -->\n          {#if yColumns.length > 0}\n            <div>\n              <p class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">Series Colors</p>\n              <div class=\"flex flex-col gap-1.5\">\n                {#each yColumns as yCol, i}\n                  <div class=\"flex items-center gap-2\">\n                    <input\n                      type=\"color\"\n                      value={colors[i] ?? 
DEFAULT_COLORS[i % DEFAULT_COLORS.length]}\n                      oninput={(e) => updateColor(i, (e.target as HTMLInputElement).value)}\n                      class=\"w-6 h-6 rounded border border-gray-300 dark:border-gray-600 cursor-pointer\"\n                    />\n                    <span class=\"text-xs text-gray-600 dark:text-gray-400 truncate\">{yCol}</span>\n                  </div>\n                {/each}\n              </div>\n            </div>\n          {/if}\n\n          <!-- Legend position -->\n          <div>\n            <label for=\"legend-position\" class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">Legend</label>\n            <Combobox\n              options={legendOptions}\n              value={legendPosition}\n              onChange={(v) => legendPosition = v as 'bottom' | 'right' | 'none'}\n              placeholder=\"Legend position\"\n            />\n          </div>\n        {/if}\n      </div>\n    </div>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/TimeRangeSelector.svelte",
    "content": "<script lang=\"ts\">\n  import {\n    decodeAbsoluteDashboardRange,\n    encodeAbsoluteDashboardRange,\n    formatDashboardTimeRangeLabel,\n    resolveNamedPreset,\n  } from '../../utils/dashboard-time'\n  import { Clock3, ChevronDown } from 'lucide-svelte'\n  import DualCalendar from './time-picker/DualCalendar.svelte'\n  import TimeInput from './time-picker/TimeInput.svelte'\n  import TimezoneSelect from './time-picker/TimezoneSelect.svelte'\n  import PresetList from './time-picker/PresetList.svelte'\n\n  interface Props {\n    value: string\n    onchange: (value: string) => void\n  }\n\n  let { value, onchange }: Props = $props()\n\n  let open = $state(false)\n  let rangeStart = $state<Date | null>(null)\n  let rangeEnd = $state<Date | null>(null)\n  let hoverDate = $state<Date | null>(null)\n  let fromTime = $state('00:00:00')\n  let toTime = $state('23:59:59')\n  let timezone = $state('UTC')\n  let rootEl: HTMLDivElement | null = null\n\n  const label = $derived(formatDashboardTimeRangeLabel(value))\n\n  // Seed calendar state when the picker opens\n  $effect(() => {\n    if (!open) return\n\n    // Try to decode the current value into calendar state\n    const absolute = decodeAbsoluteDashboardRange(value)\n    if (absolute) {\n      const from = new Date(absolute.from)\n      const to = new Date(absolute.to)\n      if (!isNaN(from.getTime()) && !isNaN(to.getTime())) {\n        rangeStart = new Date(from.getFullYear(), from.getMonth(), from.getDate())\n        rangeEnd = new Date(to.getFullYear(), to.getMonth(), to.getDate())\n        fromTime = pad(from.getUTCHours()) + ':' + pad(from.getUTCMinutes()) + ':' + pad(from.getUTCSeconds())\n        toTime = pad(to.getUTCHours()) + ':' + pad(to.getUTCMinutes()) + ':' + pad(to.getUTCSeconds())\n        return\n      }\n    }\n\n    // Named presets — resolve to get calendar dates\n    if (value.startsWith('preset:')) {\n      const resolved = resolveNamedPreset(value)\n      if (resolved) {\n        
const from = new Date(resolved.from)\n        const to = new Date(resolved.to)\n        rangeStart = new Date(from.getFullYear(), from.getMonth(), from.getDate())\n        rangeEnd = new Date(to.getFullYear(), to.getMonth(), to.getDate())\n        fromTime = pad(from.getUTCHours()) + ':' + pad(from.getUTCMinutes()) + ':' + pad(from.getUTCSeconds())\n        toTime = pad(to.getUTCHours()) + ':' + pad(to.getUTCMinutes()) + ':' + pad(to.getUTCSeconds())\n        return\n      }\n    }\n\n    // Relative shorthand — show current moment minus offset\n    const now = new Date()\n    rangeEnd = new Date(now.getFullYear(), now.getMonth(), now.getDate())\n    rangeStart = null\n    fromTime = '00:00:00'\n    toTime = pad(now.getUTCHours()) + ':' + pad(now.getUTCMinutes()) + ':' + pad(now.getUTCSeconds())\n  })\n\n  function pad(n: number): string {\n    return String(n).padStart(2, '0')\n  }\n\n  function handleDateSelect(date: Date) {\n    if (!rangeStart || rangeEnd) {\n      rangeStart = date\n      rangeEnd = null\n      hoverDate = null\n    } else {\n      if (date < rangeStart) {\n        rangeEnd = rangeStart\n        rangeStart = date\n      } else {\n        rangeEnd = date\n      }\n      hoverDate = null\n    }\n  }\n\n  function handleDateHover(date: Date | null) {\n    if (rangeStart && !rangeEnd) {\n      hoverDate = date\n    }\n  }\n\n  function applyCalendarRange() {\n    if (!rangeStart || !rangeEnd) return\n    const [fh, fm, fs] = fromTime.split(':').map(Number)\n    const [th, tm, ts] = toTime.split(':').map(Number)\n\n    let fromDate: Date\n    let toDate: Date\n    if (timezone === 'UTC') {\n      fromDate = new Date(Date.UTC(rangeStart.getFullYear(), rangeStart.getMonth(), rangeStart.getDate(), fh, fm, fs))\n      toDate = new Date(Date.UTC(rangeEnd.getFullYear(), rangeEnd.getMonth(), rangeEnd.getDate(), th, tm, ts))\n    } else {\n      fromDate = new Date(rangeStart.getFullYear(), rangeStart.getMonth(), rangeStart.getDate(), fh, fm, fs)\n      
toDate = new Date(rangeEnd.getFullYear(), rangeEnd.getMonth(), rangeEnd.getDate(), th, tm, ts)\n    }\n\n    onchange(encodeAbsoluteDashboardRange(fromDate.toISOString(), toDate.toISOString()))\n    open = false\n  }\n\n  function applyPreset(v: string) {\n    onchange(v)\n    open = false\n  }\n\n  function cancel() {\n    open = false\n  }\n\n  function onWindowMouseDown(event: MouseEvent) {\n    if (!open || !rootEl) return\n    if (!(event.target instanceof Node)) return\n    if (!rootEl.contains(event.target)) {\n      open = false\n    }\n  }\n\n  function onWindowKeyDown(event: KeyboardEvent) {\n    if (event.key === 'Escape') {\n      open = false\n    }\n  }\n\n  const rangeDescription = $derived.by(() => {\n    if (!rangeStart) return ''\n    const fmt = (d: Date) =>\n      `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}`\n    if (!rangeEnd) return fmt(rangeStart)\n    return `${fmt(rangeStart)}  →  ${fmt(rangeEnd)}`\n  })\n</script>\n\n<svelte:window onmousedown={onWindowMouseDown} onkeydown={onWindowKeyDown} />\n\n<div class=\"relative\" bind:this={rootEl}>\n  <!-- Trigger button -->\n  <button\n    class=\"inline-flex items-center gap-1.5 text-xs bg-transparent border border-gray-300 dark:border-gray-700 rounded px-2 py-1 text-gray-700 dark:text-gray-300 hover:border-ch-orange transition-colors\"\n    onclick={() => open = !open}\n    title=\"Select dashboard time range\"\n  >\n    <Clock3 size={12} class=\"text-ch-orange\" />\n    <span class=\"max-w-[220px] truncate\">{label}</span>\n    <ChevronDown size={12} class=\"text-gray-400 transition-transform {open ? 
'rotate-180' : ''}\" />\n  </button>\n\n  <!-- Popover -->\n  {#if open}\n    <div\n      class=\"absolute right-0 mt-2 z-[70] surface-card rounded-xl border border-gray-200 dark:border-gray-800 shadow-xl backdrop-blur-xl\n        sm:w-[750px] max-w-[95vw] overflow-hidden\"\n    >\n      <div class=\"relative\">\n        <!-- Left: Calendar + time inputs (defines the popover height) -->\n        <div class=\"p-5 sm:pr-[195px] flex flex-col gap-3\">\n          <DualCalendar\n            {rangeStart}\n            {rangeEnd}\n            {hoverDate}\n            onselect={handleDateSelect}\n            onhover={handleDateHover}\n          />\n\n          {#if rangeDescription}\n            <div class=\"text-center text-xs text-gray-500 dark:text-gray-400 font-mono\">\n              {rangeDescription}\n            </div>\n          {/if}\n\n          <div class=\"border-t border-gray-200 dark:border-gray-800 pt-3 flex flex-col gap-2.5\">\n            <TimeInput label=\"From\" value={fromTime} onchange={(v) => fromTime = v} />\n            <TimeInput label=\"To\" value={toTime} onchange={(v) => toTime = v} />\n            <TimezoneSelect value={timezone} onchange={(v) => timezone = v} />\n          </div>\n\n          <div class=\"flex items-center justify-end gap-2 pt-1\">\n            <button class=\"ds-btn-outline px-3 py-1.5 text-xs\" onclick={cancel}>Cancel</button>\n            <button\n              class=\"ds-btn-primary px-3 py-1.5 text-xs\"\n              onclick={applyCalendarRange}\n              disabled={!rangeStart || !rangeEnd}\n            >\n              Apply\n            </button>\n          </div>\n        </div>\n\n        <!-- Right: Presets sidebar — absolutely positioned, scrolls within calendar height -->\n        <div class=\"border-t sm:border-t-0 sm:border-l border-gray-200 dark:border-gray-800\n          w-full max-h-[50vh]\n          sm:absolute sm:top-0 sm:right-0 sm:bottom-0 sm:w-[185px] sm:max-h-none\n          py-3 
overflow-y-auto\">\n          <PresetList currentValue={value} onselect={applyPreset} />\n        </div>\n      </div>\n    </div>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/time-picker/CalendarMonth.svelte",
    "content": "<script lang=\"ts\">\n  import { buildMonthGrid, isSameDay, isInRange, isToday, monthName } from '../../../utils/calendar'\n\n  interface Props {\n    year: number\n    month: number\n    rangeStart: Date | null\n    rangeEnd: Date | null\n    hoverDate: Date | null\n    onselect: (date: Date) => void\n    onhover: (date: Date | null) => void\n  }\n\n  let { year, month, rangeStart, rangeEnd, hoverDate, onselect, onhover }: Props = $props()\n\n  const WEEKDAYS = ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa']\n\n  const grid = $derived(buildMonthGrid(year, month))\n  const title = $derived(`${monthName(month)} ${year}`)\n\n  function cellClass(date: Date): string {\n    const base = 'h-8 w-8 flex items-center justify-center text-xs rounded-full cursor-pointer transition-colors'\n    const parts = [base]\n\n    const isStart = rangeStart && isSameDay(date, rangeStart)\n    const isEnd = rangeEnd && isSameDay(date, rangeEnd)\n\n    if (isStart || isEnd) {\n      parts.push('bg-ch-orange text-white font-semibold')\n    } else if (rangeStart && rangeEnd && isInRange(date, rangeStart, rangeEnd)) {\n      parts.push('bg-orange-100 dark:bg-orange-900/25 text-ch-orange')\n    } else if (rangeStart && !rangeEnd && hoverDate) {\n      const lo = date.getTime() < rangeStart.getTime() ? date : rangeStart\n      const hi = date.getTime() < rangeStart.getTime() ? rangeStart : date\n      const hd = hoverDate.getTime() < rangeStart.getTime() ? hoverDate : hoverDate\n      const previewStart = hd < rangeStart ? hd : rangeStart\n      const previewEnd = hd < rangeStart ? 
rangeStart : hd\n      if (isInRange(date, previewStart, previewEnd) && !isSameDay(date, rangeStart)) {\n        parts.push('bg-orange-50 dark:bg-orange-900/15 text-orange-400 dark:text-orange-300')\n      } else if (!isToday(date)) {\n        parts.push('text-gray-700 dark:text-gray-300 hover:bg-gray-200 dark:hover:bg-gray-700')\n      }\n    } else if (!isToday(date)) {\n      parts.push('text-gray-700 dark:text-gray-300 hover:bg-gray-200 dark:hover:bg-gray-700')\n    }\n\n    if (isToday(date) && !isStart && !isEnd) {\n      parts.push('font-bold ring-1 ring-ch-orange')\n    }\n\n    return parts.join(' ')\n  }\n</script>\n\n<div class=\"select-none\">\n  <div class=\"text-center text-sm font-semibold text-gray-800 dark:text-gray-200 mb-2\">\n    {title}\n  </div>\n\n  <div class=\"grid grid-cols-7 gap-0.5 mb-1\">\n    {#each WEEKDAYS as day}\n      <div class=\"h-8 w-8 flex items-center justify-center text-[10px] font-medium text-gray-400 dark:text-gray-500\">\n        {day}\n      </div>\n    {/each}\n  </div>\n\n  {#each grid as week}\n    <div class=\"grid grid-cols-7 gap-0.5\">\n      {#each week as cell}\n        {#if cell}\n          <button\n            type=\"button\"\n            class={cellClass(cell)}\n            onclick={() => onselect(cell)}\n            onmouseenter={() => onhover(cell)}\n            onmouseleave={() => onhover(null)}\n          >\n            {cell.getDate()}\n          </button>\n        {:else}\n          <div class=\"h-8 w-8\"></div>\n        {/if}\n      {/each}\n    </div>\n  {/each}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/time-picker/DualCalendar.svelte",
    "content": "<script lang=\"ts\">\n  import { shiftMonth } from '../../../utils/calendar'\n  import { ChevronLeft, ChevronRight } from 'lucide-svelte'\n  import CalendarMonth from './CalendarMonth.svelte'\n\n  interface Props {\n    rangeStart: Date | null\n    rangeEnd: Date | null\n    hoverDate: Date | null\n    onselect: (date: Date) => void\n    onhover: (date: Date | null) => void\n  }\n\n  let { rangeStart, rangeEnd, hoverDate, onselect, onhover }: Props = $props()\n\n  let leftYear = $state(new Date().getFullYear())\n  let leftMonth = $state(new Date().getMonth() + 1)\n\n  const right = $derived(shiftMonth(leftYear, leftMonth, 1))\n\n  function prev() {\n    const m = shiftMonth(leftYear, leftMonth, -1)\n    leftYear = m.year\n    leftMonth = m.month\n  }\n\n  function next() {\n    const m = shiftMonth(leftYear, leftMonth, 1)\n    leftYear = m.year\n    leftMonth = m.month\n  }\n</script>\n\n<div>\n  <div class=\"flex items-center justify-between mb-3 px-1\">\n    <button\n      type=\"button\"\n      class=\"ds-icon-btn\"\n      onclick={prev}\n      title=\"Previous month\"\n    >\n      <ChevronLeft size={16} />\n    </button>\n    <button\n      type=\"button\"\n      class=\"ds-icon-btn\"\n      onclick={next}\n      title=\"Next month\"\n    >\n      <ChevronRight size={16} />\n    </button>\n  </div>\n\n  <div class=\"flex gap-8\">\n    <CalendarMonth\n      year={leftYear}\n      month={leftMonth}\n      {rangeStart}\n      {rangeEnd}\n      {hoverDate}\n      {onselect}\n      {onhover}\n    />\n    <CalendarMonth\n      year={right.year}\n      month={right.month}\n      {rangeStart}\n      {rangeEnd}\n      {hoverDate}\n      {onselect}\n      {onhover}\n    />\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/time-picker/PresetList.svelte",
    "content": "<script lang=\"ts\">\n  import { EXTENDED_PRESETS } from '../../../utils/chart-transform'\n\n  interface Props {\n    currentValue: string\n    onselect: (value: string) => void\n  }\n\n  let { currentValue, onselect }: Props = $props()\n\n  const recentPresets = EXTENDED_PRESETS.filter((p) => p.group === 'recent')\n  const namedPresets = EXTENDED_PRESETS.filter((p) => p.group === 'named')\n  const durationPresets = EXTENDED_PRESETS.filter((p) => p.group === 'duration')\n</script>\n\n<div class=\"flex flex-col h-full overflow-y-auto px-1.5\">\n  {#each recentPresets as preset}\n    <button\n      type=\"button\"\n      class=\"text-left px-3 py-1.5 text-xs rounded transition-colors whitespace-nowrap\n        {currentValue === preset.value\n          ? 'text-ch-orange bg-orange-100/60 dark:bg-orange-900/20 font-medium'\n          : 'text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-800 hover:text-gray-800 dark:hover:text-gray-200'}\"\n      onclick={() => onselect(preset.value)}\n    >\n      {preset.label}\n    </button>\n  {/each}\n\n  <div class=\"border-t border-gray-200 dark:border-gray-700 my-1.5\"></div>\n\n  {#each namedPresets as preset}\n    <button\n      type=\"button\"\n      class=\"text-left px-3 py-1.5 text-xs rounded transition-colors whitespace-nowrap\n        {currentValue === preset.value\n          ? 'text-ch-orange bg-orange-100/60 dark:bg-orange-900/20 font-medium'\n          : 'text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-800 hover:text-gray-800 dark:hover:text-gray-200'}\"\n      onclick={() => onselect(preset.value)}\n    >\n      {preset.label}\n    </button>\n  {/each}\n\n  <div class=\"border-t border-gray-200 dark:border-gray-700 my-1.5\"></div>\n\n  {#each durationPresets as preset}\n    <button\n      type=\"button\"\n      class=\"text-left px-3 py-1.5 text-xs rounded transition-colors whitespace-nowrap\n        {currentValue === preset.value\n          ? 
'text-ch-orange bg-orange-100/60 dark:bg-orange-900/20 font-medium'\n          : 'text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-800 hover:text-gray-800 dark:hover:text-gray-200'}\"\n      onclick={() => onselect(preset.value)}\n    >\n      {preset.label}\n    </button>\n  {/each}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/time-picker/TimeInput.svelte",
    "content": "<script lang=\"ts\">\n  import { Clock3 } from 'lucide-svelte'\n\n  interface Props {\n    label: string\n    value: string\n    onchange: (value: string) => void\n  }\n\n  let { label, value, onchange }: Props = $props()\n\n  let parts = $derived(value.split(':'))\n  let h = $derived(parts[0] ?? '00')\n  let m = $derived(parts[1] ?? '00')\n  let s = $derived(parts[2] ?? '00')\n\n  function update(field: 'h' | 'm' | 's', raw: string) {\n    let n = parseInt(raw, 10)\n    if (isNaN(n)) n = 0\n    const max = field === 'h' ? 23 : 59\n    n = Math.max(0, Math.min(max, n))\n    const padded = String(n).padStart(2, '0')\n    const nh = field === 'h' ? padded : h\n    const nm = field === 'm' ? padded : m\n    const ns = field === 's' ? padded : s\n    onchange(`${nh}:${nm}:${ns}`)\n  }\n\n  function selectOnFocus(e: FocusEvent) {\n    (e.target as HTMLInputElement).select()\n  }\n</script>\n\n<div class=\"flex items-center gap-2\">\n  <Clock3 size={13} class=\"text-gray-400 shrink-0\" />\n  <span class=\"text-xs text-gray-500 dark:text-gray-400 w-10 shrink-0\">{label}:</span>\n  <div class=\"flex items-center gap-1\">\n    <input\n      type=\"number\"\n      min=\"0\"\n      max=\"23\"\n      value={h}\n      class=\"w-14 text-center ds-input-sm !px-1 tabular-nums\"\n      onchange={(e) => update('h', e.currentTarget.value)}\n      onfocus={selectOnFocus}\n    />\n    <span class=\"text-xs text-gray-400\">:</span>\n    <input\n      type=\"number\"\n      min=\"0\"\n      max=\"59\"\n      value={m}\n      class=\"w-14 text-center ds-input-sm !px-1 tabular-nums\"\n      onchange={(e) => update('m', e.currentTarget.value)}\n      onfocus={selectOnFocus}\n    />\n    <span class=\"text-xs text-gray-400\">:</span>\n    <input\n      type=\"number\"\n      min=\"0\"\n      max=\"59\"\n      value={s}\n      class=\"w-14 text-center ds-input-sm !px-1 tabular-nums\"\n      onchange={(e) => update('s', e.currentTarget.value)}\n      onfocus={selectOnFocus}\n   
 />\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/dashboard/time-picker/TimezoneSelect.svelte",
    "content": "<script lang=\"ts\">\n  interface Props {\n    value: string\n    onchange: (tz: string) => void\n  }\n\n  let { value, onchange }: Props = $props()\n\n  const browserTz = Intl.DateTimeFormat().resolvedOptions().timeZone\n</script>\n\n<div class=\"flex items-center gap-2\">\n  <span class=\"text-xs text-gray-500 dark:text-gray-400 shrink-0\">Timezone:</span>\n  <select\n    class=\"ds-select text-xs flex-1\"\n    {value}\n    onchange={(e) => onchange(e.currentTarget.value)}\n  >\n    <option value=\"UTC\">UTC</option>\n    <option value=\"local\">{browserTz}</option>\n  </select>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/InsightsPanel.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnMeta, QueryPlanNode, QueryStats, QueryEstimateResult } from '../../types/query'\n  import { getDisplayType } from '../../utils/ch-types'\n  import { formatBytes, formatElapsed, formatNumber } from '../../utils/format'\n  import Combobox from '../common/Combobox.svelte'\n  import type { ComboboxOption } from '../common/Combobox.svelte'\n  import Spinner from '../common/Spinner.svelte'\n  import { Activity, Layers3, AreaChart, GitBranch, Gauge, RefreshCw, FlaskConical, Scale } from 'lucide-svelte'\n\n  interface Props {\n    meta: ColumnMeta[]\n    data: unknown[][]\n    stats?: QueryStats | null\n    elapsedMs?: number\n    running?: boolean\n    streamRows?: number\n    streamChunks?: number\n    streamStartedAt?: number | null\n    streamLastChunkAt?: number | null\n    planNodes?: QueryPlanNode[]\n    planLines?: string[]\n    planSource?: string\n    planLoading?: boolean\n    planError?: string | null\n    onLoadPlan?: () => void\n    onSample?: (perShard: number) => void\n    profile?: Record<string, unknown> | null\n    profileAvailable?: boolean\n    profileReason?: string | null\n    profileLoading?: boolean\n    profileError?: string | null\n    samplingMode?: string | null\n    estimate?: QueryEstimateResult | null\n  }\n\n  let {\n    meta,\n    data,\n    stats = null,\n    elapsedMs = 0,\n    running = false,\n    streamRows = 0,\n    streamChunks = 0,\n    streamStartedAt = null,\n    streamLastChunkAt = null,\n    planNodes = [],\n    planLines = [],\n    planSource = '',\n    planLoading = false,\n    planError = null,\n    onLoadPlan,\n    onSample,\n    profile = null,\n    profileAvailable = false,\n    profileReason = null,\n    profileLoading = false,\n    profileError = null,\n    samplingMode = null,\n    estimate = null,\n  }: Props = $props()\n\n  const encoder = new TextEncoder()\n  let histogramColumn = $state('')\n  let histogramBins = $state(12)\n  let samplePerShard = 
$state(25)\n\n  function asNumber(v: unknown): number | null {\n    if (v === null || v === undefined || v === '') return null\n    const n = Number(v)\n    return Number.isFinite(n) ? n : null\n  }\n\n  function estimateBytes(v: unknown): number {\n    if (v === null || v === undefined) return 1\n    if (typeof v === 'number' || typeof v === 'bigint') return 8\n    if (typeof v === 'boolean') return 1\n    if (typeof v === 'string') return encoder.encode(v).length\n    try {\n      return encoder.encode(JSON.stringify(v)).length\n    } catch {\n      return 16\n    }\n  }\n\n  const elapsedSeconds = $derived(elapsedMs > 0 ? elapsedMs / 1000 : 0)\n  const rowsPerSec = $derived(elapsedSeconds > 0 ? data.length / elapsedSeconds : 0)\n  const numericColumns = $derived(meta.filter((col) => getDisplayType(col.type) === 'number'))\n  const histogramColumnOptions = $derived.by<ComboboxOption[]>(() =>\n    numericColumns.map((col) => ({ value: col.name, label: col.name, hint: col.type, keywords: `${col.name} ${col.type}` }))\n  )\n\n  $effect(() => {\n    const exists = numericColumns.some((c) => c.name === histogramColumn)\n    if (!exists) histogramColumn = numericColumns[0]?.name ?? ''\n  })\n\n  const columnMemory = $derived.by(() => {\n    if (!meta.length || !data.length) return []\n    const items = meta.map((col, ci) => {\n      let bytes = 0\n      for (let r = 0; r < data.length; r++) bytes += estimateBytes(data[r][ci])\n      return {\n        name: col.name,\n        type: col.type,\n        bytes,\n        avgBytes: bytes / Math.max(data.length, 1),\n      }\n    })\n    const total = items.reduce((sum, col) => sum + col.bytes, 0)\n    return items\n      .map((col) => ({\n        ...col,\n        pct: total > 0 ? (col.bytes / total) * 100 : 0,\n      }))\n      .sort((a, b) => b.bytes - a.bytes)\n  })\n\n  const totalEstimatedBytes = $derived(columnMemory.reduce((sum, c) => sum + c.bytes, 0))\n  const throughputBytesPerSec = $derived(elapsedSeconds > 0 ? 
totalEstimatedBytes / elapsedSeconds : 0)\n\n  const histogram = $derived.by(() => {\n    if (!histogramColumn) return null\n    const index = meta.findIndex((c) => c.name === histogramColumn)\n    if (index === -1) return null\n\n    const values: number[] = []\n    for (let i = 0; i < data.length; i++) {\n      const n = asNumber(data[i][index])\n      if (n !== null) values.push(n)\n    }\n    if (!values.length) return null\n\n    let min = Math.min(...values)\n    let max = Math.max(...values)\n    if (min === max) {\n      min = min - 1\n      max = max + 1\n    }\n\n    const bins = Math.max(6, Math.min(24, histogramBins))\n    const counts = Array.from({ length: bins }, () => 0)\n    const width = (max - min) / bins\n\n    for (const v of values) {\n      let idx = Math.floor((v - min) / width)\n      if (idx < 0) idx = 0\n      if (idx >= bins) idx = bins - 1\n      counts[idx]++\n    }\n\n    const maxCount = Math.max(...counts, 1)\n    return counts.map((count, i) => {\n      const from = min + i * width\n      const to = from + width\n      return {\n        count,\n        from,\n        to,\n        widthPct: (count / maxCount) * 100,\n      }\n    })\n  })\n\n  function profileNumber(key: string): number {\n    if (!profile) return 0\n    const value = profile[key]\n    const num = Number(value ?? 0)\n    return Number.isFinite(num) ? num : 0\n  }\n\n  function handleSample() {\n    if (!onSample) return\n    const n = Math.max(1, Math.min(500, samplePerShard))\n    samplePerShard = n\n    onSample(n)\n  }\n\n  function splitPlanLabel(label: string): { title: string; detail: string } {\n    const cleaned = (label ?? 
'').trim()\n    if (!cleaned) return { title: 'Operation', detail: '' }\n\n    const parenIdx = cleaned.indexOf('(')\n    if (parenIdx > 0) {\n      return {\n        title: cleaned.slice(0, parenIdx).trim(),\n        detail: cleaned.slice(parenIdx).trim(),\n      }\n    }\n\n    const dashIdx = cleaned.indexOf(':')\n    if (dashIdx > 0) {\n      return {\n        title: cleaned.slice(0, dashIdx).trim(),\n        detail: cleaned.slice(dashIdx + 1).trim(),\n      }\n    }\n\n    return { title: cleaned, detail: '' }\n  }\n\n  const planFlow = $derived.by(() => {\n    if (planNodes.length > 0) {\n      return planNodes.map((node, index) => {\n        const parsed = splitPlanLabel(node.label)\n        return {\n          id: node.id || `node-${index}`,\n          level: node.level ?? 0,\n          index: index + 1,\n          title: parsed.title,\n          detail: parsed.detail,\n          raw: node.label,\n        }\n      })\n    }\n\n    return planLines.map((line, index) => {\n      const parsed = splitPlanLabel(line)\n      return {\n        id: `line-${index}`,\n        level: 0,\n        index: index + 1,\n        title: parsed.title,\n        detail: parsed.detail,\n        raw: line,\n      }\n    })\n  })\n</script>\n\n<div class=\"flex-1 overflow-auto min-h-0 p-4 space-y-4\">\n  <div class=\"grid grid-cols-1 md:grid-cols-2 xl:grid-cols-4 gap-3\">\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><Activity size={13} />Runtime</div>\n      <div class=\"mt-2 text-xl font-semibold text-gray-800 dark:text-gray-100\">{elapsedMs > 0 ? 
formatElapsed(elapsedSeconds) : '\\u2014'}</div>\n      <div class=\"mt-1 text-xs text-gray-500\">{formatNumber(data.length)} rows returned</div>\n    </div>\n\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><Gauge size={13} />Inline Profiling</div>\n      <div class=\"mt-2 text-sm font-semibold text-gray-800 dark:text-gray-100\">{rowsPerSec > 0 ? formatNumber(Math.round(rowsPerSec)) : '0'} rows/s</div>\n      <div class=\"mt-1 text-xs text-gray-500\">{throughputBytesPerSec > 0 ? formatBytes(throughputBytesPerSec) : '0 B'}/s (estimated)</div>\n    </div>\n\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><Layers3 size={13} />Streaming Viewer</div>\n      <div class=\"mt-2 text-sm font-semibold text-gray-800 dark:text-gray-100\">{formatNumber(streamRows || data.length)} rows, {formatNumber(streamChunks)} chunks</div>\n      <div class=\"mt-1 text-xs text-gray-500\">\n        {#if running}\n          Live ingest running...\n        {:else if streamLastChunkAt}\n          Last chunk at {new Date(streamLastChunkAt).toLocaleTimeString()}\n        {:else}\n          No stream events yet\n        {/if}\n      </div>\n    </div>\n\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><Scale size={13} />Estimate vs Actual</div>\n      {#if estimate && estimate.success && !estimate.error && stats}\n        {@const actualRows = Number(stats.rows_read ?? 0)}\n        {@const estimatedRows = estimate.total_rows}\n        {@const accuracy = estimatedRows > 0 ? 
Math.round((Math.min(actualRows, estimatedRows) / Math.max(actualRows, estimatedRows)) * 100) : 0}\n        <div class=\"mt-2 grid grid-cols-2 gap-1.5 text-xs\">\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1\">\n            <div class=\"text-gray-500\">Estimated</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{formatNumber(estimatedRows)} rows</div>\n          </div>\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1\">\n            <div class=\"text-gray-500\">Actual</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{formatNumber(actualRows)} rows</div>\n          </div>\n        </div>\n        <div class=\"mt-1.5 flex items-center gap-2\">\n          <div class=\"flex-1 h-1.5 rounded bg-gray-200 dark:bg-gray-800 overflow-hidden\">\n            <div class=\"h-full rounded {accuracy >= 80 ? 'bg-green-500' : accuracy >= 50 ? 'bg-yellow-500' : 'bg-red-500'}\" style=\"width:{accuracy}%\"></div>\n          </div>\n          <span class=\"text-xs font-medium {accuracy >= 80 ? 'text-green-600 dark:text-green-400' : accuracy >= 50 ? 'text-yellow-600 dark:text-yellow-400' : 'text-red-600 dark:text-red-400'}\">{accuracy}%</span>\n        </div>\n      {:else if estimate && estimate.success && !estimate.error && !stats}\n        <div class=\"mt-2 text-xs text-gray-800 dark:text-gray-100\">\n          <span class=\"font-semibold\">{formatNumber(estimate.total_rows)}</span> rows · {estimate.total_parts} parts · {formatNumber(estimate.total_marks)} marks\n        </div>\n        <div class=\"mt-1 text-xs text-gray-500\">Run the query to compare with actual.</div>\n      {:else}\n        <div class=\"mt-2 text-xs text-gray-500\">No estimate available. 
Type a SELECT query.</div>\n      {/if}\n    </div>\n\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><FlaskConical size={13} />Sampling</div>\n      <div class=\"mt-2 flex items-center gap-2\">\n        <input\n          type=\"number\"\n          min=\"1\"\n          max=\"500\"\n          bind:value={samplePerShard}\n          class=\"w-24 px-2 py-1 text-xs rounded-md bg-gray-100 dark:bg-gray-800 border border-gray-300 dark:border-gray-700\"\n        />\n        <button\n          class=\"px-2.5 py-1 text-xs rounded-md border border-gray-300 dark:border-gray-700 hover:bg-gray-200 dark:hover:bg-gray-800\"\n          onclick={handleSample}\n          disabled={!onSample || running}\n        >Sample / shard</button>\n      </div>\n      <div class=\"mt-1 text-xs text-gray-500\">Mode: {samplingMode ?? 'none'}</div>\n    </div>\n  </div>\n\n  <div class=\"grid grid-cols-1 xl:grid-cols-2 gap-4\">\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"flex items-center justify-between mb-3\">\n        <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><GitBranch size={13} />Query Plan Visualizer</div>\n        <button\n          class=\"px-2.5 py-1 text-xs rounded-md border border-gray-300 dark:border-gray-700 hover:bg-gray-200 dark:hover:bg-gray-800 inline-flex items-center gap-1.5\"\n          onclick={() => onLoadPlan?.()}\n          disabled={!onLoadPlan || planLoading}\n        >\n          {#if planLoading}<Spinner size=\"sm\" />{:else}<RefreshCw size={12} />{/if}\n          Refresh Plan\n        </button>\n      </div>\n      {#if planLoading}\n        <div class=\"text-xs text-gray-500\">Loading query plan...</div>\n      {:else if planError}\n        <div class=\"text-xs text-red-500\">{planError}</div>\n      {:else if planFlow.length > 0}\n        <div class=\"text-[11px] text-gray-500 
mb-2\">Source: {planSource} · {formatNumber(planFlow.length)} stages</div>\n        <div class=\"max-h-64 overflow-auto rounded-lg border border-gray-200 dark:border-gray-800 bg-gray-50/70 dark:bg-gray-900/60 p-2.5\">\n          <div class=\"space-y-2\">\n            {#each planFlow as node, i (node.id)}\n              <div class=\"relative\" style={`margin-left:${node.level * 14}px`}>\n                {#if i < planFlow.length - 1}\n                  <div class=\"absolute left-3 top-7 h-6 w-px bg-gray-300 dark:bg-gray-700\"></div>\n                {/if}\n                <div class=\"flex items-start gap-2\">\n                  <div class=\"mt-0.5 w-6 h-6 rounded-full border border-gray-300 dark:border-gray-700 bg-gray-100 dark:bg-gray-800 text-[10px] font-semibold text-gray-600 dark:text-gray-300 flex items-center justify-center\">\n                    {node.index}\n                  </div>\n                  <div class=\"flex-1 min-w-0 rounded-md border border-gray-200 dark:border-gray-800 bg-white/60 dark:bg-gray-950/40 px-2.5 py-1.5\">\n                    <div class=\"text-xs font-semibold text-gray-700 dark:text-gray-200\">{node.title}</div>\n                    {#if node.detail}\n                      <div class=\"text-[11px] text-gray-500 mt-0.5 break-words\">{node.detail}</div>\n                    {/if}\n                  </div>\n                </div>\n              </div>\n            {/each}\n          </div>\n        </div>\n      {:else}\n        <div class=\"text-xs text-gray-500\">No plan loaded yet.</div>\n      {/if}\n    </div>\n\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5 mb-3\"><Layers3 size={13} />Columnar Memory View</div>\n      {#if columnMemory.length === 0}\n        <div class=\"text-xs text-gray-500\">Run a query with rows to estimate per-column memory.</div>\n      {:else}\n        <div class=\"space-y-2 max-h-64 overflow-auto 
pr-1\">\n          {#each columnMemory as col}\n            <div>\n              <div class=\"flex items-center justify-between text-xs\">\n                <span class=\"font-mono text-gray-700 dark:text-gray-300 truncate pr-3\">{col.name}</span>\n                <span class=\"text-gray-500\">{formatBytes(col.bytes)} ({col.pct.toFixed(1)}%)</span>\n              </div>\n              <div class=\"mt-1 h-1.5 rounded bg-gray-200 dark:bg-gray-800 overflow-hidden\">\n                <div class=\"h-full bg-ch-orange/80\" style={`width:${Math.max(3, col.pct)}%`}></div>\n              </div>\n              <div class=\"mt-0.5 text-[11px] text-gray-500\">{col.type} · avg {formatBytes(col.avgBytes)}/row</div>\n            </div>\n          {/each}\n        </div>\n      {/if}\n    </div>\n  </div>\n\n  <div class=\"grid grid-cols-1 xl:grid-cols-2 gap-4\">\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"flex items-center justify-between mb-3\">\n        <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5\"><AreaChart size={13} />Histogram Per Column</div>\n        <div class=\"flex items-center gap-2\">\n          <div class=\"w-56\">\n            <Combobox\n              options={histogramColumnOptions}\n              value={histogramColumn}\n              onChange={(v) => histogramColumn = v}\n              placeholder=\"Numeric column\"\n            />\n          </div>\n          <input type=\"range\" min=\"6\" max=\"24\" step=\"1\" bind:value={histogramBins} />\n        </div>\n      </div>\n\n      {#if !histogram || histogram.length === 0}\n        <div class=\"text-xs text-gray-500\">No numeric values available for histogram.</div>\n      {:else}\n        <div class=\"space-y-1.5\">\n          {#each histogram as bin}\n            <div class=\"grid grid-cols-[90px_1fr_42px] items-center gap-2 text-[11px]\">\n              <span class=\"text-gray-500 font-mono truncate\">{bin.from.toFixed(2)}</span>\n   
           <div class=\"h-3 rounded bg-gray-200 dark:bg-gray-800 overflow-hidden\">\n                <div class=\"h-full bg-ch-orange/80\" style={`width:${Math.max(bin.widthPct, 2)}%`}></div>\n              </div>\n              <span class=\"text-right text-gray-600 dark:text-gray-400\">{bin.count}</span>\n            </div>\n          {/each}\n        </div>\n      {/if}\n    </div>\n\n    <div class=\"surface-card rounded-xl p-3\">\n      <div class=\"text-[11px] uppercase tracking-wider text-gray-500 inline-flex items-center gap-1.5 mb-3\"><Gauge size={13} />Inline Profile Events</div>\n      {#if profileLoading}\n        <div class=\"text-xs text-gray-500\">Loading profile from system.query_log...</div>\n      {:else if profileError}\n        <div class=\"text-xs text-red-500\">{profileError}</div>\n      {:else if profileAvailable && profile}\n        <div class=\"grid grid-cols-2 gap-2 text-xs\">\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1.5\">\n            <div class=\"text-gray-500\">Duration</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{profileNumber('query_duration_ms')} ms</div>\n          </div>\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1.5\">\n            <div class=\"text-gray-500\">Memory</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{formatBytes(profileNumber('memory_usage'))}</div>\n          </div>\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1.5\">\n            <div class=\"text-gray-500\">Read Rows</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{formatNumber(profileNumber('read_rows'))}</div>\n          </div>\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1.5\">\n            <div class=\"text-gray-500\">Read Bytes</div>\n            <div class=\"font-semibold text-gray-800 
dark:text-gray-100\">{formatBytes(profileNumber('read_bytes'))}</div>\n          </div>\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1.5\">\n            <div class=\"text-gray-500\">Result Rows</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{formatNumber(profileNumber('result_rows'))}</div>\n          </div>\n          <div class=\"rounded-md bg-gray-100/70 dark:bg-gray-800/70 px-2 py-1.5\">\n            <div class=\"text-gray-500\">Selected Marks</div>\n            <div class=\"font-semibold text-gray-800 dark:text-gray-100\">{formatNumber(profileNumber('selected_marks'))}</div>\n          </div>\n        </div>\n      {:else}\n        <div class=\"text-xs text-gray-500\">{profileReason ?? 'No profile row available yet.'}</div>\n      {/if}\n    </div>\n  </div>\n\n  {#if stats}\n    <div class=\"text-[11px] text-gray-500\">\n      Stream started: {streamStartedAt ? new Date(streamStartedAt).toLocaleTimeString() : '\\u2014'}\n      · rows_read: {formatNumber(Number(stats.rows_read ?? 0))}\n      · bytes_read: {formatBytes(Number(stats.bytes_read ?? 0))}\n    </div>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/ResultFooter.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnMeta, QueryStats } from '../../types/query'\n  import { formatNumber, formatElapsed, formatBytes } from '../../utils/format'\n  import {\n    generateCSV,\n    generateTSV,\n    generateJSON,\n    generateJSONCompact,\n    generateJSONLines,\n    generateMarkdown,\n    generateSQLInsert,\n    generateXML,\n    copyToClipboard,\n    downloadFile,\n  } from '../../utils/export'\n  import { success, error } from '../../stores/toast.svelte'\n  import { Table2, BarChart3, Columns3, Sparkles, Copy, Download, ChevronUp, FileJson, FileText, Database, Hash, AlertTriangle } from 'lucide-svelte'\n  import { getFormatNumbers, toggleFormatNumbers } from '../../stores/number-format.svelte'\n  import { getMaxResultRows, setMaxResultRows } from '../../stores/query-limit.svelte'\n\n  type Tab = 'data' | 'stats' | 'schema' | 'insights'\n  type ExportFormat = 'csv' | 'tsv' | 'json' | 'jsoncompact' | 'jsonl' | 'markdown' | 'sql' | 'xml'\n\n  interface Props {\n    activeTab: Tab\n    onTabChange: (tab: Tab) => void\n    meta: ColumnMeta[]\n    data: unknown[][]\n    stats?: QueryStats | null\n    elapsedMs?: number\n    streamRows?: number\n    streamChunks?: number\n  }\n\n  let { activeTab, onTabChange, meta, data, stats = null, elapsedMs = 0, streamRows = 0, streamChunks = 0 }: Props = $props()\n  let copyMenuOpen = $state(false)\n  let downloadMenuOpen = $state(false)\n  let copyMenuRef = $state<HTMLDivElement | null>(null)\n  let downloadMenuRef = $state<HTMLDivElement | null>(null)\n\n  const rowCount = $derived(data.length)\n\n  const tabs: { id: Tab; label: string; icon: typeof Table2 }[] = [\n    { id: 'data', label: 'Data', icon: Table2 },\n    { id: 'stats', label: 'Stats', icon: BarChart3 },\n    { id: 'schema', label: 'Schema', icon: Columns3 },\n    { id: 'insights', label: 'Insights', icon: Sparkles },\n  ]\n\n  const tabClass = (id: Tab) =>\n    `flex items-center gap-1 px-2 py-1 text-xs font-medium 
transition-colors ${\n      activeTab === id\n        ? 'text-ch-blue border-b-2 border-ch-blue'\n        : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 border-b-2 border-transparent'\n    }`\n\n  const formatOptions: { id: ExportFormat | 'parquet'; label: string; ext: string; mime: string; icon: typeof FileText; disabled?: boolean }[] = [\n    { id: 'csv', label: 'CSV', ext: 'csv', mime: 'text/csv;charset=utf-8', icon: FileText },\n    { id: 'tsv', label: 'TSV', ext: 'tsv', mime: 'text/tab-separated-values;charset=utf-8', icon: FileText },\n    { id: 'json', label: 'JSON', ext: 'json', mime: 'application/json;charset=utf-8', icon: FileJson },\n    { id: 'jsoncompact', label: 'JSONCompact', ext: 'json', mime: 'application/json;charset=utf-8', icon: FileJson },\n    { id: 'jsonl', label: 'JSONLines', ext: 'jsonl', mime: 'application/x-ndjson;charset=utf-8', icon: FileJson },\n    { id: 'markdown', label: 'Markdown', ext: 'md', mime: 'text/markdown;charset=utf-8', icon: FileText },\n    { id: 'sql', label: 'SQL Insert', ext: 'sql', mime: 'application/sql;charset=utf-8', icon: Database },\n    { id: 'xml', label: 'XML', ext: 'xml', mime: 'application/xml;charset=utf-8', icon: FileText },\n    { id: 'parquet', label: 'Parquet (Soon)', ext: 'parquet', mime: 'application/octet-stream', icon: Database, disabled: true },\n  ]\n\n  function payloadFor(format: ExportFormat): string {\n    switch (format) {\n      case 'csv': return generateCSV(meta, data)\n      case 'tsv': return generateTSV(meta, data)\n      case 'json': return generateJSON(meta, data)\n      case 'jsoncompact': return generateJSONCompact(meta, data)\n      case 'jsonl': return generateJSONLines(meta, data)\n      case 'markdown': return generateMarkdown(meta, data)\n      case 'sql': return generateSQLInsert(meta, data)\n      case 'xml': return generateXML(meta, data)\n      default: return generateCSV(meta, data)\n    }\n  }\n\n  async function handleCopy(format: ExportFormat | 
'parquet') {\n    if (format === 'parquet') return\n    try {\n      await copyToClipboard(payloadFor(format))\n      success(`Copied ${formatNumber(rowCount)} rows as ${format.toUpperCase()}`)\n      copyMenuOpen = false\n    } catch {\n      error('Failed to copy to clipboard')\n    }\n  }\n\n  function handleDownload(format: ExportFormat | 'parquet') {\n    if (format === 'parquet') return\n    const opt = formatOptions.find((f) => f.id === format)\n    if (!opt) return\n    const filename = `query_results.${opt.ext}`\n    downloadFile(payloadFor(format), filename, opt.mime)\n    success(`Downloaded ${filename}`)\n    downloadMenuOpen = false\n  }\n\n  function closeMenus() {\n    copyMenuOpen = false\n    downloadMenuOpen = false\n  }\n\n  function handleWindowClick(e: MouseEvent) {\n    const target = e.target as Node | null\n    if (!target) return\n    if (copyMenuRef?.contains(target) || downloadMenuRef?.contains(target)) return\n    closeMenus()\n  }\n\n  function handleWindowKeydown(e: KeyboardEvent) {\n    if (e.key === 'Escape') closeMenus()\n  }\n</script>\n\n<svelte:window onclick={handleWindowClick} onkeydown={handleWindowKeydown} />\n\n<div class=\"flex items-center gap-1 px-2 py-0.5 border-t border-gray-200 dark:border-gray-800 bg-gray-100/50 dark:bg-gray-900/50 shrink-0 h-9 select-none\">\n  <!-- Tabs -->\n  <div class=\"flex items-center gap-0.5\">\n    {#each tabs as tab}\n      <button class={tabClass(tab.id)} onclick={() => onTabChange(tab.id)}>\n        <tab.icon size={13} />\n        {tab.label}\n      </button>\n    {/each}\n  </div>\n\n  <!-- Divider -->\n  <div class=\"w-px h-4 bg-gray-300 dark:bg-gray-700 mx-1\"></div>\n\n  <!-- Info chips -->\n  <div class=\"flex items-center gap-3 text-xs text-gray-500 flex-1 min-w-0\">\n    <span>{formatNumber(rowCount)} rows</span>\n    {#if elapsedMs > 0}\n      <span>{formatElapsed(elapsedMs / 1000)}</span>\n    {/if}\n    {#if streamChunks > 0}\n      <span>{formatNumber(streamRows)} 
streamed</span>\n      <span>{formatNumber(streamChunks)} chunks</span>\n    {/if}\n    {#if stats}\n      {#if stats.rows_read}\n        <span>{formatNumber(stats.rows_read)} read</span>\n      {/if}\n      {#if stats.bytes_read}\n        <span>{formatBytes(stats.bytes_read)}</span>\n      {/if}\n    {/if}\n  </div>\n\n  <!-- Number format toggle -->\n  <div class=\"w-px h-4 bg-gray-300 dark:bg-gray-700 mx-1\"></div>\n  <button\n    class=\"flex items-center gap-1 px-1.5 py-1 text-xs rounded-md transition-colors\n      {getFormatNumbers()\n        ? 'text-ch-orange bg-orange-100/60 dark:bg-orange-900/30'\n        : 'text-gray-400 dark:text-gray-500 hover:text-gray-600 dark:hover:text-gray-300'}\"\n    onclick={toggleFormatNumbers}\n    title={getFormatNumbers() ? 'Numbers formatted with separators (click to show raw)' : 'Numbers shown as raw values (click to format)'}\n  >\n    <Hash size={12} />\n    <span class=\"hidden sm:inline\">{getFormatNumbers() ? 'Format Numbers' : 'Raw Numbers'}</span>\n  </button>\n\n  <!-- Row limit input -->\n  <div class=\"w-px h-4 bg-gray-300 dark:bg-gray-700 mx-1\"></div>\n  <div class=\"flex items-center gap-1\">\n    {#if getMaxResultRows() > 10000}\n      <AlertTriangle size={12} class=\"text-amber-500\" />\n    {/if}\n    <span class=\"text-xs text-gray-500\">Max rows</span>\n    <input\n      type=\"number\"\n      min=\"1\"\n      class=\"w-16 px-1.5 py-0.5 text-xs rounded border focus:outline-none focus:ring-1\n        {getMaxResultRows() > 10000\n          ? 
'border-amber-400 bg-amber-50 dark:bg-amber-900/20 text-amber-700 dark:text-amber-400 focus:ring-amber-400'\n          : 'border-gray-300 dark:border-gray-700 bg-transparent text-gray-600 dark:text-gray-300 focus:ring-ch-blue focus:border-ch-blue'}\"\n      value={getMaxResultRows()}\n      onchange={(e) => setMaxResultRows(parseInt(e.currentTarget.value) || 1000)}\n    />\n  </div>\n\n  <!-- Export buttons -->\n  <div class=\"flex items-center gap-1\">\n    <div class=\"relative\" bind:this={copyMenuRef}>\n      <button\n        class=\"flex items-center gap-1.5 px-2 py-1 text-xs border border-gray-300 dark:border-gray-700 rounded-md text-gray-600 dark:text-gray-300 hover:text-gray-900 dark:hover:text-gray-100 hover:bg-gray-200 dark:hover:bg-gray-800 transition-colors\"\n        onclick={() => {\n          copyMenuOpen = !copyMenuOpen\n          if (copyMenuOpen) downloadMenuOpen = false\n        }}\n      >\n        <Copy size={12} />\n        Copy\n        <ChevronUp size={12} class=\"opacity-70\" />\n      </button>\n\n      {#if copyMenuOpen}\n        <div class=\"absolute right-0 bottom-full mb-1 w-44 rounded-lg border border-gray-300 dark:border-gray-700 bg-gray-50/98 dark:bg-gray-900/98 backdrop-blur-xl shadow-xl p-1 z-20\">\n          {#each formatOptions as option}\n            <button\n              class=\"w-full flex items-center gap-2 px-2 py-1.5 text-xs rounded-md text-left transition-colors\n                {option.disabled\n                  ? 
'text-gray-400 dark:text-gray-600 cursor-not-allowed'\n                  : 'text-gray-600 dark:text-gray-300 hover:text-gray-900 dark:hover:text-gray-100 hover:bg-gray-200 dark:hover:bg-gray-800'}\"\n              onclick={() => !option.disabled && handleCopy(option.id)}\n              disabled={option.disabled}\n            >\n              <option.icon size={12} />\n              {option.label}\n            </button>\n          {/each}\n        </div>\n      {/if}\n    </div>\n\n    <div class=\"relative\" bind:this={downloadMenuRef}>\n      <button\n        class=\"flex items-center gap-1.5 px-2 py-1 text-xs border border-gray-300 dark:border-gray-700 rounded-md text-gray-600 dark:text-gray-300 hover:text-gray-900 dark:hover:text-gray-100 hover:bg-gray-200 dark:hover:bg-gray-800 transition-colors\"\n        onclick={() => {\n          downloadMenuOpen = !downloadMenuOpen\n          if (downloadMenuOpen) copyMenuOpen = false\n        }}\n      >\n        <Download size={12} />\n        Download\n        <ChevronUp size={12} class=\"opacity-70\" />\n      </button>\n\n      {#if downloadMenuOpen}\n        <div class=\"absolute right-0 bottom-full mb-1 w-44 rounded-lg border border-gray-300 dark:border-gray-700 bg-gray-50/98 dark:bg-gray-900/98 backdrop-blur-xl shadow-xl p-1 z-20\">\n          {#each formatOptions as option}\n            <button\n              class=\"w-full flex items-center gap-2 px-2 py-1.5 text-xs rounded-md text-left transition-colors\n                {option.disabled\n                  ? 
'text-gray-400 dark:text-gray-600 cursor-not-allowed'\n                  : 'text-gray-600 dark:text-gray-300 hover:text-gray-900 dark:hover:text-gray-100 hover:bg-gray-200 dark:hover:bg-gray-800'}\"\n              onclick={() => !option.disabled && handleDownload(option.id)}\n              disabled={option.disabled}\n            >\n              <option.icon size={12} />\n              {option.label}\n            </button>\n          {/each}\n        </div>\n      {/if}\n    </div>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/ResultPanel.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnMeta, QueryPlanNode, QueryStats, QueryEstimateResult } from '../../types/query'\n  import VirtualTable from '../table/VirtualTable.svelte'\n  import Spinner from '../common/Spinner.svelte'\n  import ResultFooter from './ResultFooter.svelte'\n  import StatsPanel from './StatsPanel.svelte'\n  import SchemaPanel from './SchemaPanel.svelte'\n  import InsightsPanel from './InsightsPanel.svelte'\n  import { computeColumnStats } from '../../utils/stats'\n  import { SquareTerminal, AlertTriangle } from 'lucide-svelte'\n\n  type Tab = 'data' | 'stats' | 'schema' | 'insights'\n\n  interface Props {\n    meta: ColumnMeta[]\n    data: unknown[][]\n    loading?: boolean\n    error?: string | null\n    stats?: QueryStats | null\n    elapsedMs?: number\n    running?: boolean\n    streamRows?: number\n    streamChunks?: number\n    streamStartedAt?: number | null\n    streamLastChunkAt?: number | null\n    planNodes?: QueryPlanNode[]\n    planLines?: string[]\n    planSource?: string\n    planLoading?: boolean\n    planError?: string | null\n    onLoadPlan?: () => void\n    onSample?: (perShard: number) => void\n    profile?: Record<string, unknown> | null\n    profileAvailable?: boolean\n    profileReason?: string | null\n    profileLoading?: boolean\n    profileError?: string | null\n    samplingMode?: string | null\n    estimate?: QueryEstimateResult | null\n  }\n\n  let {\n    meta,\n    data,\n    loading = false,\n    error = null,\n    stats = null,\n    elapsedMs = 0,\n    running = false,\n    streamRows = 0,\n    streamChunks = 0,\n    streamStartedAt = null,\n    streamLastChunkAt = null,\n    planNodes = [],\n    planLines = [],\n    planSource = '',\n    planLoading = false,\n    planError = null,\n    onLoadPlan,\n    onSample,\n    profile = null,\n    profileAvailable = false,\n    profileReason = null,\n    profileLoading = false,\n    profileError = null,\n    samplingMode = null,\n    estimate = null,\n 
 }: Props = $props()\n\n  let activeTab = $state<Tab>('data')\n\n  // Reset to Data tab when query results change\n  $effect(() => {\n    meta; // track meta changes\n    activeTab = 'data'\n  })\n\n  const columnStats = $derived.by(() => {\n    if (meta.length === 0 || data.length === 0) return []\n    return computeColumnStats(meta, data)\n  })\n</script>\n\n<div class=\"flex flex-col flex-1 min-h-0\">\n  {#if loading}\n    <div class=\"flex flex-col items-center justify-center flex-1 gap-2 text-gray-500\">\n      <Spinner size=\"sm\" />\n      <span class=\"text-sm\">Executing query...</span>\n    </div>\n  {:else if error}\n    <div class=\"flex-1 p-4\">\n      <div class=\"flex items-start gap-3 bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3.5\">\n        <AlertTriangle size={16} class=\"text-red-400 shrink-0 mt-0.5\" />\n        <div class=\"text-sm text-red-700 dark:text-red-300 whitespace-pre-wrap break-words\">{error}</div>\n      </div>\n    </div>\n  {:else if meta.length === 0}\n    <div class=\"flex flex-col items-center justify-center flex-1 gap-2 text-gray-400 dark:text-gray-600\">\n      <SquareTerminal size={28} class=\"text-gray-300 dark:text-gray-700\" />\n      <p class=\"text-sm\">Run a query to see results</p>\n      <p class=\"text-xs text-gray-300 dark:text-gray-700\">Cmd/Ctrl+Enter to execute</p>\n    </div>\n  {:else}\n    <!-- Tab content -->\n    {#if activeTab === 'data'}\n      <VirtualTable {meta} {data} />\n    {:else if activeTab === 'stats'}\n      <StatsPanel stats={columnStats} />\n    {:else if activeTab === 'insights'}\n      <InsightsPanel\n        {meta}\n        {data}\n        {stats}\n        {elapsedMs}\n        {running}\n        {streamRows}\n        {streamChunks}\n        {streamStartedAt}\n        {streamLastChunkAt}\n        {planNodes}\n        {planLines}\n        {planSource}\n        {planLoading}\n        {planError}\n        {onLoadPlan}\n        {onSample}\n    
    {profile}\n        {profileAvailable}\n        {profileReason}\n        {profileLoading}\n        {profileError}\n        {samplingMode}\n        {estimate}\n      />\n    {:else}\n      <SchemaPanel {meta} />\n    {/if}\n\n    <ResultFooter\n      {activeTab}\n      onTabChange={(tab) => activeTab = tab}\n      {meta}\n      {data}\n      {stats}\n      {elapsedMs}\n      {streamRows}\n      {streamChunks}\n    />\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/SchemaPanel.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnMeta } from '../../types/query'\n  import { getDisplayType } from '../../utils/ch-types'\n\n  interface Props {\n    meta: ColumnMeta[]\n  }\n\n  let { meta }: Props = $props()\n\n  const badgeColors: Record<string, string> = {\n    number: 'bg-orange-100 text-orange-700 dark:bg-orange-900/40 dark:text-orange-300',\n    string: 'bg-green-100 text-green-700 dark:bg-green-900/40 dark:text-green-300',\n    date: 'bg-purple-100 text-purple-700 dark:bg-purple-900/40 dark:text-purple-300',\n    bool: 'bg-yellow-100 text-yellow-700 dark:bg-yellow-900/40 dark:text-yellow-300',\n    json: 'bg-orange-100 text-orange-700 dark:bg-orange-900/40 dark:text-orange-300',\n    null: 'bg-gray-100 text-gray-500 dark:bg-gray-800 dark:text-gray-400',\n    unknown: 'bg-gray-100 text-gray-500 dark:bg-gray-800 dark:text-gray-400',\n  }\n</script>\n\n<div class=\"flex-1 overflow-auto min-h-0\">\n  <table class=\"w-full text-sm border-collapse\">\n    <thead class=\"sticky top-0 z-10 bg-gray-50 dark:bg-gray-900\">\n      <tr class=\"border-b border-gray-300 dark:border-gray-700\">\n        <th class=\"px-3 py-1.5 text-left text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400 w-10\">#</th>\n        <th class=\"px-3 py-1.5 text-left text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Column Name</th>\n        <th class=\"px-3 py-1.5 text-left text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">ClickHouse Type</th>\n        <th class=\"px-3 py-1.5 text-left text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Category</th>\n      </tr>\n    </thead>\n    <tbody>\n      {#each meta as col, i}\n        {@const dt = getDisplayType(col.type)}\n        <tr class=\"border-b border-gray-100 dark:border-gray-900 {i % 2 === 1 ? 
'bg-gray-50/50 dark:bg-gray-900/30' : ''}\">\n          <td class=\"px-3 py-1.5 text-gray-400 tabular-nums\">{i + 1}</td>\n          <td class=\"px-3 py-1.5 font-mono text-xs\">{col.name}</td>\n          <td class=\"px-3 py-1.5 text-gray-600 dark:text-gray-400 font-mono text-xs\">{col.type}</td>\n          <td class=\"px-3 py-1.5\">\n            <span class=\"inline-block px-2 py-0.5 text-xs font-medium rounded {badgeColors[dt]}\">{dt}</span>\n          </td>\n        </tr>\n      {/each}\n    </tbody>\n  </table>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/SqlEditor.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount, onDestroy } from 'svelte'\n  import { EditorView, keymap } from '@codemirror/view'\n  import { EditorState, Compartment } from '@codemirror/state'\n  import { sql, StandardSQL } from '@codemirror/lang-sql'\n  import { autocompletion } from '@codemirror/autocomplete'\n  import { defaultKeymap, history, historyKeymap } from '@codemirror/commands'\n  import { bracketMatching, HighlightStyle, syntaxHighlighting } from '@codemirror/language'\n  import { highlightSelectionMatches, searchKeymap } from '@codemirror/search'\n  import { closeBrackets, closeBracketsKeymap } from '@codemirror/autocomplete'\n  import { tags as t } from '@lezer/highlight'\n  import { clickhouseCompletionSource } from '../../editor/completions'\n  import { getTheme } from '../../stores/theme.svelte'\n\n  interface Props {\n    value?: string\n    onrun?: (sql: string) => void\n    onchange?: (sql: string) => void\n  }\n\n  let { value = '', onrun, onchange }: Props = $props()\n\n  let container: HTMLDivElement\n  let view: EditorView | undefined\n  const themeCompartment = new Compartment()\n\n  const lightTheme = EditorView.theme({\n    '&': { backgroundColor: 'rgba(255,255,255,0.94)' },\n    '.cm-gutters': { backgroundColor: '#f4f4f5', borderRight: '1px solid #d4d4d8' },\n    '.cm-activeLineGutter': { backgroundColor: '#ececef' },\n    '.cm-activeLine': { backgroundColor: '#f3f4f6' },\n    '.cm-selectionBackground': { backgroundColor: '#fed7aa !important' },\n    '&.cm-focused .cm-selectionBackground': { backgroundColor: '#fdba74 !important' },\n    '.cm-cursor': { borderLeftColor: '#1f2126' },\n    '.cm-matchingBracket': { backgroundColor: '#ffedd5', outline: '1px solid #f97316' },\n    '.cm-tooltip-autocomplete': {\n      border: '1px solid #d4d4d8',\n      borderRadius: '10px',\n      backgroundColor: '#ffffff',\n      boxShadow: '0 14px 30px rgba(0,0,0,0.12)',\n      overflow: 'hidden',\n    },\n    '.cm-tooltip-autocomplete > ul': 
{\n      maxHeight: '320px',\n      fontFamily: 'var(--font-sans)',\n    },\n    '.cm-tooltip-autocomplete > ul > li': {\n      fontSize: '12px',\n      padding: '6px 10px',\n    },\n    '.cm-tooltip-autocomplete > ul > li[aria-selected]': {\n      backgroundColor: 'rgba(249,115,22,0.16)',\n      color: '#1f2126',\n    },\n  }, { dark: false })\n\n  const darkTheme = EditorView.theme({\n    '&': { backgroundColor: 'rgba(22,23,28,0.96)', color: '#f3f4f6' },\n    '.cm-gutters': { backgroundColor: 'rgba(28,30,36,0.96)', borderRight: '1px solid #42454f', color: '#a5a8b2' },\n    '.cm-activeLine': { backgroundColor: 'rgba(249,115,22,0.12)' },\n    '.cm-activeLineGutter': { backgroundColor: 'rgba(249,115,22,0.18)' },\n    '.cm-selectionBackground': { backgroundColor: 'rgba(251,146,60,0.32) !important' },\n    '.cm-matchingBracket': { backgroundColor: 'rgba(249,115,22,0.22)', outline: '1px solid rgba(251,146,60,0.9)' },\n    '.cm-panels': { backgroundColor: '#18191f', color: '#fed7aa' },\n    '.cm-cursor': { borderLeftColor: '#f3f4f6' },\n    '.cm-tooltip-autocomplete': {\n      border: '1px solid #3f434c',\n      borderRadius: '10px',\n      backgroundColor: '#1b1d22',\n      boxShadow: '0 16px 38px rgba(0,0,0,0.5)',\n      overflow: 'hidden',\n    },\n    '.cm-tooltip-autocomplete > ul': {\n      maxHeight: '320px',\n      fontFamily: 'var(--font-sans)',\n    },\n    '.cm-tooltip-autocomplete > ul > li': {\n      fontSize: '12px',\n      padding: '6px 10px',\n    },\n    '.cm-tooltip-autocomplete > ul > li[aria-selected]': {\n      backgroundColor: 'rgba(249,115,22,0.22)',\n      color: '#f8fafc',\n    },\n  }, { dark: true })\n\n  const lightHighlight = HighlightStyle.define([\n    { tag: [t.keyword, t.operatorKeyword, t.controlKeyword, t.definitionKeyword, t.moduleKeyword], color: '#c2410c', fontWeight: '600' },\n    { tag: [t.function(t.variableName), t.function(t.propertyName)], color: '#ea580c' },\n    { tag: [t.variableName, t.definition(t.variableName), 
t.definition(t.name), t.special(t.variableName)], color: '#27272a' },\n    { tag: [t.propertyName], color: '#166534' },\n    { tag: [t.typeName, t.className], color: '#374151', fontWeight: '500' },\n    { tag: [t.string, t.special(t.string)], color: '#15803d' },\n    { tag: [t.number, t.integer, t.float, t.atom], color: '#b45309' },\n    { tag: [t.bool, t.null], color: '#a16207', fontWeight: '600' },\n    { tag: [t.comment], color: '#71717a', fontStyle: 'italic' },\n    { tag: [t.operator, t.punctuation, t.separator, t.bracket], color: '#52525b' },\n    { tag: t.invalid, color: '#b91c1c', textDecoration: 'underline wavy' },\n  ])\n\n  const darkHighlight = HighlightStyle.define([\n    { tag: [t.keyword, t.operatorKeyword, t.controlKeyword, t.definitionKeyword, t.moduleKeyword], color: '#fb923c', fontWeight: '600' },\n    { tag: [t.function(t.variableName), t.function(t.propertyName)], color: '#fdba74' },\n    { tag: [t.variableName, t.definition(t.variableName), t.definition(t.name), t.special(t.variableName)], color: '#f4f4f5' },\n    { tag: [t.propertyName], color: '#86efac' },\n    { tag: [t.typeName, t.className], color: '#d4d4d8', fontWeight: '500' },\n    { tag: [t.string, t.special(t.string)], color: '#4ade80' },\n    { tag: [t.number, t.integer, t.float, t.atom], color: '#fbbf24' },\n    { tag: [t.bool, t.null], color: '#f59e0b', fontWeight: '600' },\n    { tag: [t.comment], color: '#9ca3af', fontStyle: 'italic' },\n    { tag: [t.operator, t.punctuation, t.separator, t.bracket], color: '#d4d4d8' },\n    { tag: t.invalid, color: '#f87171', textDecoration: 'underline wavy' },\n  ])\n\n  function getThemeExtension() {\n    return getTheme() === 'dark'\n      ? 
[darkTheme, syntaxHighlighting(darkHighlight)]\n      : [lightTheme, syntaxHighlighting(lightHighlight)]\n  }\n\n  const runKeyBinding = keymap.of([\n    {\n      key: 'Mod-Enter',\n      run: (v) => {\n        const main = v.state.selection.main\n        const selected = v.state.sliceDoc(main.from, main.to).trim()\n        onrun?.(selected || v.state.doc.toString())\n        return true\n      },\n    },\n  ])\n\n  // Bun can install a second @codemirror/view copy under transitive deps;\n  // normalize keymaps to one array type for TS while runtime dedupe is handled by Vite.\n  const editorKeymaps = [...defaultKeymap, ...historyKeymap, ...searchKeymap, ...closeBracketsKeymap] as any\n\n  onMount(() => {\n    const state = EditorState.create({\n      doc: value,\n      extensions: [\n        runKeyBinding,\n        keymap.of(editorKeymaps),\n        history(),\n        bracketMatching(),\n        closeBrackets(),\n        highlightSelectionMatches(),\n        sql({ dialect: StandardSQL }),\n        autocompletion({\n          override: [clickhouseCompletionSource],\n          activateOnTyping: true,\n          maxRenderedOptions: 300,\n          defaultKeymap: true,\n          selectOnOpen: true,\n        }),\n        themeCompartment.of(getThemeExtension()),\n        EditorView.theme({\n          '&': { height: '100%', fontSize: '13px' },\n          '.cm-scroller': { overflow: 'auto', fontFamily: 'var(--font-mono)' },\n        }),\n        EditorView.updateListener.of((update) => {\n          if (update.docChanged) {\n            onchange?.(update.state.doc.toString())\n          }\n        }),\n        EditorView.lineWrapping,\n      ],\n    })\n\n    view = new EditorView({ state, parent: container })\n\n    // Watch for theme changes via MutationObserver on <html> class\n    const observer = new MutationObserver(() => {\n      if (view) {\n        view.dispatch({\n          effects: themeCompartment.reconfigure(getThemeExtension()),\n        })\n      }\n    
})\n    observer.observe(document.documentElement, { attributes: true, attributeFilter: ['class'] })\n\n    return () => observer.disconnect()\n  })\n\n  onDestroy(() => {\n    view?.destroy()\n  })\n\n  /** Set the editor content programmatically */\n  export function setValue(text: string) {\n    if (view) {\n      view.dispatch({\n        changes: { from: 0, to: view.state.doc.length, insert: text },\n      })\n    }\n  }\n\n  /** Get the current editor content */\n  export function getValue(): string {\n    return view?.state.doc.toString() ?? ''\n  }\n\n  /** Get the selected text if any, otherwise the full document */\n  export function getSelectedOrAll(): string {\n    if (!view) return ''\n    const main = view.state.selection.main\n    const selected = view.state.sliceDoc(main.from, main.to).trim()\n    return selected || view.state.doc.toString()\n  }\n</script>\n\n<div bind:this={container} class=\"h-full w-full\"></div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/StatsPanel.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnStats } from '../../utils/stats'\n  import { formatNumber } from '../../utils/format'\n\n  interface Props {\n    stats: ColumnStats[]\n  }\n\n  let { stats }: Props = $props()\n\n  function fmt(v: number | undefined): string {\n    if (v === undefined) return '\\u2014'\n    if (Number.isInteger(v)) return formatNumber(v)\n    return v.toLocaleString(undefined, { maximumFractionDigits: 2 })\n  }\n\n  function pct(v: number): string {\n    return v.toFixed(1) + '%'\n  }\n</script>\n\n<div class=\"flex-1 overflow-auto min-h-0\">\n  <table class=\"w-full text-sm border-collapse\">\n    <thead class=\"sticky top-0 z-10 bg-gray-50 dark:bg-gray-900\">\n      <tr class=\"border-b border-gray-300 dark:border-gray-700\">\n        <th class=\"px-3 py-1.5 text-left text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Name</th>\n        <th class=\"px-3 py-1.5 text-left text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Type</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Count</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Nulls</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Null%</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Min</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Max</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Avg</th>\n        <th class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Sum</th>\n        <th 
class=\"px-3 py-1.5 text-right text-xs font-medium uppercase tracking-wider text-gray-500 dark:text-gray-400\">Distinct</th>\n      </tr>\n    </thead>\n    <tbody>\n      {#each stats as col, i}\n        <tr class=\"border-b border-gray-100 dark:border-gray-900 {i % 2 === 1 ? 'bg-gray-50/50 dark:bg-gray-900/30' : ''}\">\n          <td class=\"px-3 py-1.5 font-mono text-xs\">{col.name}</td>\n          <td class=\"px-3 py-1.5 text-gray-500 text-xs\">{col.displayType}</td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">{formatNumber(col.count)}</td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">{formatNumber(col.nulls)}</td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">{pct(col.nullPct)}</td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">\n            {#if col.displayType === 'number'}\n              {fmt(col.min)}\n            {:else if col.displayType === 'string'}\n              {col.minLen !== undefined ? fmt(col.minLen) + ' ch' : '\\u2014'}\n            {:else if col.displayType === 'date'}\n              {col.earliest ?? '\\u2014'}\n            {:else}\n              {'\\u2014'}\n            {/if}\n          </td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">\n            {#if col.displayType === 'number'}\n              {fmt(col.max)}\n            {:else if col.displayType === 'string'}\n              {col.maxLen !== undefined ? fmt(col.maxLen) + ' ch' : '\\u2014'}\n            {:else if col.displayType === 'date'}\n              {col.latest ?? '\\u2014'}\n            {:else}\n              {'\\u2014'}\n            {/if}\n          </td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">\n            {#if col.displayType === 'number'}\n              {fmt(col.avg)}\n            {:else if col.displayType === 'string'}\n              {col.avgLen !== undefined ? 
fmt(col.avgLen) + ' ch' : '\\u2014'}\n            {:else}\n              {'\\u2014'}\n            {/if}\n          </td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">\n            {fmt(col.sum)}\n          </td>\n          <td class=\"px-3 py-1.5 text-right tabular-nums\">\n            {col.distinct !== undefined ? formatNumber(col.distinct) : '\\u2014'}\n          </td>\n        </tr>\n      {/each}\n    </tbody>\n  </table>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/editor/Toolbar.svelte",
    "content": "<script lang=\"ts\">\n  import Button from '../common/Button.svelte'\n  import { Play, Square, AlignLeft, BookOpen, Save, Zap } from 'lucide-svelte'\n  import type { QueryEstimateResult } from '../../types/query'\n  import { formatNumber } from '../../utils/format'\n\n  interface Props {\n    running?: boolean\n    onrun: () => void\n    oncancel?: () => void\n    onformat?: () => void\n    onexplain?: () => void\n    onsave?: () => void\n    estimate?: QueryEstimateResult | null\n    estimateLoading?: boolean\n  }\n\n  let { running = false, onrun, oncancel, onformat, onexplain, onsave, estimate = null, estimateLoading = false }: Props = $props()\n\n  const estimateLabel = $derived.by(() => {\n    if (estimateLoading) return 'Estimating...'\n    if (!estimate || !estimate.success || estimate.error) return null\n    if (estimate.total_rows === 0 && estimate.total_parts === 0) return null\n    const rows = formatNumber(estimate.total_rows)\n    const parts = estimate.total_parts\n    return `~${rows} rows · ${parts} part${parts !== 1 ? 
's' : ''}`\n  })\n</script>\n\n<div class=\"flex items-center gap-2 px-2 py-1.5 border-b border-gray-200 dark:border-gray-800 bg-gray-100/50 dark:bg-gray-900/50\">\n  {#if running && oncancel}\n    <Button size=\"sm\" variant=\"ghost\" onclick={oncancel}>\n      <Square size={14} class=\"text-red-400\" />\n      <span class=\"text-red-400\">Cancel</span>\n    </Button>\n  {:else}\n    <Button size=\"sm\" onclick={onrun} loading={running}>\n      <Play size={14} />\n      Run\n    </Button>\n  {/if}\n\n  {#if onformat}\n    <Button size=\"sm\" variant=\"ghost\" onclick={onformat}>\n      <AlignLeft size={14} />\n      Format\n    </Button>\n  {/if}\n\n  {#if onexplain}\n    <Button size=\"sm\" variant=\"ghost\" onclick={onexplain}>\n      <BookOpen size={14} />\n      Explain\n    </Button>\n  {/if}\n\n  {#if estimateLabel}\n    <div class=\"flex items-center gap-1.5 px-2.5 py-1 rounded-md bg-ch-blue/10 dark:bg-ch-blue/15 text-ch-blue text-xs font-medium border border-ch-blue/20\">\n      <Zap size={12} />\n      {estimateLabel}\n    </div>\n  {:else if estimateLoading}\n    <div class=\"flex items-center gap-1.5 px-2.5 py-1 rounded-md bg-gray-100 dark:bg-gray-800 text-gray-500 text-xs\">\n      <Zap size={12} class=\"animate-pulse\" />\n      Estimating...\n    </div>\n  {/if}\n\n  <div class=\"flex-1\"></div>\n\n  {#if onsave}\n    <Button size=\"sm\" variant=\"ghost\" onclick={onsave}>\n      <Save size={14} />\n      Save\n    </Button>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/explorer/DataPreview.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnMeta } from '../../types/query'\n  import { fetchExplorerData } from '../../api/query'\n  import VirtualTable from '../table/VirtualTable.svelte'\n  import Pagination from '../table/Pagination.svelte'\n  import Spinner from '../common/Spinner.svelte'\n\n  interface Props {\n    database: string\n    table: string\n  }\n\n  let { database, table }: Props = $props()\n\n  let meta = $state<ColumnMeta[]>([])\n  let data = $state<unknown[][]>([])\n  let page = $state(0)\n  let pageSize = $state(100)\n  let totalRows = $state(0)\n  let sortColumn = $state('')\n  let sortDir = $state<'asc' | 'desc'>('asc')\n  let loading = $state(false)\n  let error = $state<string | null>(null)\n\n  async function loadData() {\n    loading = true\n    error = null\n    try {\n      const res = await fetchExplorerData({\n        database,\n        table,\n        page,\n        page_size: pageSize,\n        sort_column: sortColumn,\n        sort_dir: sortDir,\n      })\n      meta = res.meta ?? []\n      data = res.data ?? []\n      totalRows = res.total_rows ?? 0\n    } catch (e: any) {\n      error = e.message\n    } finally {\n      loading = false\n    }\n  }\n\n  function handleSort(column: string) {\n    if (sortColumn === column) {\n      sortDir = sortDir === 'asc' ? 
'desc' : 'asc'\n    } else {\n      sortColumn = column\n      sortDir = 'asc'\n    }\n    page = 0\n    loadData()\n  }\n\n  function handlePageChange(newPage: number) {\n    page = newPage\n    loadData()\n  }\n\n  // Load when database/table changes\n  $effect(() => {\n    if (database && table) {\n      page = 0\n      sortColumn = ''\n      sortDir = 'asc'\n      loadData()\n    }\n  })\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <div class=\"px-3 py-2 border-b border-gray-200 dark:border-gray-800 bg-gray-100/50 dark:bg-gray-900/50 text-sm text-gray-700 dark:text-gray-300\">\n    <span class=\"text-gray-500\">{database}.</span><span class=\"font-medium\">{table}</span>\n  </div>\n\n  {#if loading && meta.length === 0}\n    <div class=\"flex items-center justify-center flex-1 gap-2\">\n      <Spinner size=\"sm\" />\n      <span class=\"text-sm text-gray-500\">Loading...</span>\n    </div>\n  {:else if error}\n    <div class=\"p-4\">\n      <div class=\"bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3 text-sm text-red-700 dark:text-red-300\">\n        {error}\n      </div>\n    </div>\n  {:else if meta.length > 0}\n    <VirtualTable\n      {meta}\n      {data}\n      {sortColumn}\n      {sortDir}\n      onsort={handleSort}\n    />\n    {#if totalRows > pageSize}\n      <Pagination {page} {pageSize} {totalRows} onchange={handlePageChange} />\n    {/if}\n  {:else}\n    <div class=\"flex items-center justify-center flex-1 text-gray-400 dark:text-gray-600 text-sm\">\n      Select a table to preview data\n    </div>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/explorer/DatabaseTree.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from \"svelte\";\n  import {\n    getDatabases,\n    isSchemaLoading,\n    loadDatabases,\n    loadTables,\n    toggleDatabase,\n    toggleTable,\n  } from \"../../stores/schema.svelte\";\n  import {\n    openQueryTab,\n    openTableTab,\n    openDatabaseTab,\n  } from \"../../stores/tabs.svelte\";\n  import { getSession } from \"../../stores/session.svelte\";\n  import { apiGet, apiPost, apiPostForm } from \"../../api/client\";\n  import {\n    success as toastSuccess,\n    error as toastError,\n  } from \"../../stores/toast.svelte\";\n  import Spinner from \"../common/Spinner.svelte\";\n  import Sheet from \"../common/Sheet.svelte\";\n  import Combobox, { type ComboboxOption } from \"../common/Combobox.svelte\";\n  import {\n    Database,\n    Table2,\n    ChevronRight,\n    ChevronDown,\n    Columns3,\n    Search,\n    X,\n    MoreHorizontal,\n    Play,\n    Info,\n    RefreshCw,\n    Copy,\n    FolderPlus,\n    TableProperties,\n    Trash2,\n    Plus,\n    Upload,\n    AlertTriangle,\n    Eye,\n    Layers,\n  } from \"lucide-svelte\";\n  import type { Table } from \"../../types/schema\";\n  import ContextMenu, {\n    type ContextMenuItem,\n  } from \"../common/ContextMenu.svelte\";\n\n  interface Props {\n    onSelectTable?: (database: string, table: string) => void;\n    onSelectDatabase?: (database: string) => void;\n  }\n\n  interface TableColumnDraft {\n    id: string;\n    name: string;\n    type: string;\n    defaultExpression: string;\n    comment: string;\n  }\n\n  interface UploadColumnDraft {\n    id: string;\n    name: string;\n    type: string;\n    sample: string;\n  }\n\n  let { onSelectTable, onSelectDatabase }: Props = $props();\n\n  onMount(() => {\n    loadDatabases();\n  });\n\n  const databases = $derived(getDatabases());\n  const loading = $derived(isSchemaLoading());\n  const session = $derived(getSession());\n  const canManageSchema = $derived(session?.role === \"admin\");\n\n  
let searchTerm = $state(\"\");\n  let showSystemDbs = $state(false);\n  const SYSTEM_DBS = [\"system\", \"information_schema\", \"INFORMATION_SCHEMA\"];\n\n  const databaseEngineOptions: ComboboxOption[] = [\n    { value: \"Atomic\", label: \"Atomic\", hint: \"Default modern engine\" },\n    { value: \"Ordinary\", label: \"Ordinary\", hint: \"Legacy engine\" },\n    { value: \"Lazy\", label: \"Lazy\", hint: \"On-demand metadata load\" },\n    {\n      value: \"Replicated\",\n      label: \"Replicated\",\n      hint: \"Replicated database metadata\",\n    },\n  ];\n\n  const tableEngineOptions: ComboboxOption[] = [\n    {\n      value: \"MergeTree\",\n      label: \"MergeTree\",\n      hint: \"General purpose OLAP engine\",\n    },\n    {\n      value: \"ReplacingMergeTree\",\n      label: \"ReplacingMergeTree\",\n      hint: \"Dedup by replacing rows\",\n    },\n    {\n      value: \"SummingMergeTree\",\n      label: \"SummingMergeTree\",\n      hint: \"Summation by key on merge\",\n    },\n    {\n      value: \"AggregatingMergeTree\",\n      label: \"AggregatingMergeTree\",\n      hint: \"Aggregated states storage\",\n    },\n    {\n      value: \"CollapsingMergeTree\",\n      label: \"CollapsingMergeTree\",\n      hint: \"Row collapsing with sign column\",\n    },\n    {\n      value: \"VersionedCollapsingMergeTree\",\n      label: \"VersionedCollapsingMergeTree\",\n      hint: \"Version-aware collapse\",\n    },\n    { value: \"Log\", label: \"Log\", hint: \"Simple append-only log\" },\n    { value: \"StripeLog\", label: \"StripeLog\", hint: \"Column stripe log\" },\n    { value: \"TinyLog\", label: \"TinyLog\", hint: \"Lightweight tiny log\" },\n    { value: \"Memory\", label: \"Memory\", hint: \"In-memory ephemeral table\" },\n  ];\n\n  let columnSeed = 0;\n  function newColumnDraft(name = \"\", type = \"String\"): TableColumnDraft {\n    columnSeed += 1;\n    return {\n      id: `col-${columnSeed}`,\n      name,\n      type,\n      defaultExpression: \"\",\n  
    comment: \"\",\n    };\n  }\n\n  type MenuTarget =\n    | { kind: \"root\" }\n    | { kind: \"database\"; database: string }\n    | { kind: \"table\"; database: string; table: string };\n\n  let menu = $state<{ target: MenuTarget; x: number; y: number } | null>(null);\n\n  let clusters = $state<string[]>([]);\n  let clustersLoading = $state(false);\n  let dataTypes = $state<string[]>([]);\n  let dataTypesLoading = $state(false);\n\n  const fallbackDataTypes = [\n    \"String\",\n    \"UInt8\",\n    \"UInt16\",\n    \"UInt32\",\n    \"UInt64\",\n    \"Int8\",\n    \"Int16\",\n    \"Int32\",\n    \"Int64\",\n    \"Float32\",\n    \"Float64\",\n    \"Boolean\",\n    \"UUID\",\n    \"Date\",\n    \"Date32\",\n    \"DateTime\",\n    \"DateTime64(3)\",\n    \"Decimal(18, 2)\",\n    \"Array(String)\",\n    \"Tuple(String, UInt64)\",\n    \"Map(String, String)\",\n    \"Nullable(String)\",\n    \"LowCardinality(String)\",\n    \"FixedString(16)\",\n    \"IPv4\",\n    \"IPv6\",\n    \"JSON\",\n  ];\n\n  const parametricTypeTemplates: Record<string, string> = {\n    Array: \"Array(String)\",\n    Tuple: \"Tuple(String, UInt64)\",\n    Map: \"Map(String, String)\",\n    Nullable: \"Nullable(String)\",\n    LowCardinality: \"LowCardinality(String)\",\n    Decimal: \"Decimal(18, 2)\",\n    DateTime64: \"DateTime64(3)\",\n    FixedString: \"FixedString(16)\",\n    AggregateFunction: \"AggregateFunction(sum, UInt64)\",\n    SimpleAggregateFunction: \"SimpleAggregateFunction(sum, UInt64)\",\n    Enum8: \"Enum8('a' = 1)\",\n    Enum16: \"Enum16('a' = 1)\",\n  };\n\n  function normalizeDataTypeTemplate(typeName: string): string {\n    const trimmed = typeName.trim();\n    if (!trimmed) return trimmed;\n    return parametricTypeTemplates[trimmed] ?? 
trimmed;\n  }\n\n  let createDatabaseSheetOpen = $state(false);\n  let createDatabaseSubmitting = $state(false);\n  let createDatabaseForm = $state({\n    name: \"\",\n    engine: \"Atomic\",\n    onCluster: \"\",\n    ifNotExists: true,\n  });\n\n  let createTableSheetOpen = $state(false);\n  let createTableSubmitting = $state(false);\n  let createTableErrorMessage = $state(\"\");\n  let createTableForm = $state({\n    database: \"\",\n    name: \"\",\n    engine: \"MergeTree\",\n    onCluster: \"\",\n    ifNotExists: true,\n    orderBy: \"\",\n    partitionBy: \"\",\n    primaryKey: \"\",\n    sampleBy: \"\",\n    ttl: \"\",\n    settings: \"\",\n    comment: \"\",\n    columns: [\n      newColumnDraft(\"id\", \"UInt64\"),\n      newColumnDraft(\"created_at\", \"DateTime\"),\n    ],\n  });\n\n  let deleteDatabaseSheetOpen = $state(false);\n  let deleteDatabaseSubmitting = $state(false);\n  let deleteDatabaseForm = $state({\n    name: \"\",\n    onCluster: \"\",\n    sync: true,\n    typedName: \"\",\n  });\n\n  let deleteTableSheetOpen = $state(false);\n  let deleteTableSubmitting = $state(false);\n  let deleteTableForm = $state({\n    database: \"\",\n    name: \"\",\n    onCluster: \"\",\n    sync: true,\n    typedName: \"\",\n  });\n\n  type UploadTargetMode = \"new\" | \"existing\";\n  let uploadSheetOpen = $state(false);\n  let uploadDiscovering = $state(false);\n  let uploadSubmitting = $state(false);\n  let uploadTablesLoading = $state(false);\n  let uploadSourceFile = $state<File | null>(null);\n  let uploadSourceFormat = $state(\"\");\n  let uploadRowsDetected = $state(0);\n  let uploadPreviewRows = $state<Array<Record<string, unknown>>>([]);\n  let uploadColumns = $state<UploadColumnDraft[]>([]);\n  let uploadErrorMessage = $state(\"\");\n  let uploadCreateSQL = $state(\"\");\n  let uploadInsertSQL = $state(\"\");\n  let uploadForm = $state({\n    database: \"\",\n    mode: \"new\" as UploadTargetMode,\n    tableName: \"\",\n    existingTable: \"\",\n   
 engine: \"MergeTree\",\n    onCluster: \"\",\n    ifNotExists: true,\n    orderBy: \"\",\n    partitionBy: \"\",\n    primaryKey: \"\",\n    comment: \"\",\n  });\n  let uploadTables = $state<string[]>([]);\n\n  const uploadAccept = \".csv,.parquet,.json,.jsonl\";\n  let uploadColumnSeed = 0;\n\n  const databaseOptions = $derived.by<ComboboxOption[]>(() =>\n    databases\n      .map((db) => ({\n        value: db.name,\n        label: db.name,\n        hint: db.tables\n          ? `${db.tables.length} tables loaded`\n          : \"Tables not loaded\",\n      }))\n      .sort((a, b) => a.label.localeCompare(b.label)),\n  );\n\n  const clusterOptions = $derived.by<ComboboxOption[]>(() => [\n    {\n      value: \"\",\n      label: \"No cluster (local only)\",\n      hint: \"Run on connected server only\",\n    },\n    ...clusters.map((cluster) => ({ value: cluster, label: cluster })),\n  ]);\n\n  const dataTypeOptions = $derived.by<ComboboxOption[]>(() => {\n    const source = dataTypes.length > 0 ? 
dataTypes : fallbackDataTypes;\n    const options: ComboboxOption[] = [];\n    const seen = new Set<string>();\n    for (const typeName of source) {\n      const template = normalizeDataTypeTemplate(typeName);\n      if (!template || seen.has(template)) continue;\n      seen.add(template);\n      options.push({\n        value: template,\n        label: template,\n        keywords: `${typeName.toLowerCase()} ${template.toLowerCase()}`,\n      });\n    }\n    return options;\n  });\n\n  const uploadTableOptions = $derived.by<ComboboxOption[]>(() =>\n    uploadTables.map((table) => ({\n      value: table,\n      label: table,\n    })),\n  );\n\n  const filteredDatabases = $derived.by(() => {\n    let dbs = databases;\n    if (!showSystemDbs) {\n      dbs = dbs.filter((db) => !SYSTEM_DBS.includes(db.name));\n    }\n    if (!searchTerm.trim()) return dbs;\n\n    const term = searchTerm.toLowerCase();\n    return dbs\n      .map((db) => {\n        const dbMatches = db.name.toLowerCase().includes(term);\n        const matchingTables = db.tables?.filter((t) =>\n          t.name.toLowerCase().includes(term),\n        );\n        if (dbMatches) return db;\n        if (matchingTables && matchingTables.length > 0) {\n          return { ...db, tables: matchingTables, expanded: true };\n        }\n        return null;\n      })\n      .filter(Boolean) as typeof dbs;\n  });\n\n  function isSystemDatabase(name: string): boolean {\n    return SYSTEM_DBS.includes(name);\n  }\n\n  function getTableType(engine?: string): \"view\" | \"materialized-view\" | \"table\" {\n    if (!engine) return \"table\";\n    const e = engine.toLowerCase();\n    if (e === \"materializedview\" || e === \"materialized view\") return \"materialized-view\";\n    if (e === \"view\") return \"view\";\n    return \"table\";\n  }\n\n  function getTableTypeLabel(engine?: string): string {\n    const t = getTableType(engine);\n    if (t === \"view\") return \"VIEW\";\n    if (t === \"materialized-view\") return 
\"MAT. VIEW\";\n    return \"\";\n  }\n\n  function closeMenu() {\n    menu = null;\n  }\n\n  function openSchemaActionsMenu(e: MouseEvent) {\n    e.preventDefault();\n    e.stopPropagation();\n    const target = e.currentTarget as HTMLElement | null;\n    const rect = target?.getBoundingClientRect();\n    menu = {\n      target: { kind: \"root\" },\n      x: rect ? rect.right - 180 : e.clientX,\n      y: rect ? rect.bottom + 6 : e.clientY,\n    };\n  }\n\n  function openContextMenu(e: MouseEvent, target: MenuTarget) {\n    e.preventDefault();\n    e.stopPropagation();\n    menu = {\n      target,\n      x: Math.min(window.innerWidth - 220, e.clientX),\n      y: Math.min(window.innerHeight - 220, e.clientY),\n    };\n  }\n\n  function selectTable(database: string, table: string) {\n    if (onSelectTable) {\n      onSelectTable(database, table);\n    } else {\n      openTableTab(database, table);\n    }\n  }\n\n  function selectDatabase(database: string) {\n    if (onSelectDatabase) {\n      onSelectDatabase(database);\n    } else {\n      openDatabaseTab(database);\n    }\n  }\n\n  async function copyToClipboard(value: string) {\n    try {\n      await navigator.clipboard.writeText(value);\n      toastSuccess(\"Copied to clipboard\");\n    } catch {\n      toastError(\"Clipboard unavailable\");\n    } finally {\n      closeMenu();\n    }\n  }\n\n  function queryDatabase(database: string) {\n    openQueryTab(`SHOW TABLES FROM \\`${database}\\``);\n    closeMenu();\n  }\n\n  function queryTable(database: string, table: string) {\n    openQueryTab(`SELECT *\\nFROM \\`${database}\\`.\\`${table}\\`\\nLIMIT 1000`);\n    closeMenu();\n  }\n\n  function viewDatabaseInfo(database: string) {\n    selectDatabase(database);\n    closeMenu();\n  }\n\n  function viewTableInfo(database: string, table: string) {\n    selectTable(database, table);\n    closeMenu();\n  }\n\n  function refreshDatabase(database: string) {\n    loadTables(database);\n    closeMenu();\n  }\n\n  async 
function refreshSchema() {\n    if (loading) return;\n    await loadDatabases();\n  }\n\n  async function loadClusters(force = false) {\n    if (clustersLoading) return;\n    if (!force && clusters.length > 0) return;\n    clustersLoading = true;\n    try {\n      const res = await apiGet<{ clusters: string[] }>(\"/api/query/clusters\");\n      clusters = res.clusters ?? [];\n    } catch {\n      clusters = [];\n    } finally {\n      clustersLoading = false;\n    }\n  }\n\n  async function loadDataTypes(force = false) {\n    if (dataTypesLoading) return;\n    if (!force && dataTypes.length > 0) return;\n    dataTypesLoading = true;\n    try {\n      const res = await apiGet<{ data_types: string[] }>(\n        \"/api/query/data-types\",\n      );\n      const incoming = res.data_types ?? [];\n      const unique = Array.from(\n        new Set(incoming.map((typeName) => typeName.trim()).filter(Boolean)),\n      );\n      dataTypes = unique.sort((a, b) => a.localeCompare(b));\n    } catch {\n      dataTypes = [];\n    } finally {\n      dataTypesLoading = false;\n    }\n  }\n\n  function dataTypeOptionsFor(selectedType: string): ComboboxOption[] {\n    const options = dataTypeOptions;\n    const normalized = selectedType.trim();\n    if (!normalized) return options;\n    if (options.some((opt) => opt.value === normalized)) return options;\n    return [{ value: normalized, label: normalized }, ...options];\n  }\n\n  function resetCreateDatabaseForm() {\n    createDatabaseForm = {\n      name: \"\",\n      engine: \"Atomic\",\n      onCluster: \"\",\n      ifNotExists: true,\n    };\n  }\n\n  function resetCreateTableForm() {\n    createTableErrorMessage = \"\";\n    createTableForm = {\n      database: \"\",\n      name: \"\",\n      engine: \"MergeTree\",\n      onCluster: \"\",\n      ifNotExists: true,\n      orderBy: \"\",\n      partitionBy: \"\",\n      primaryKey: \"\",\n      sampleBy: \"\",\n      ttl: \"\",\n      settings: \"\",\n      comment: \"\",\n      
columns: [\n        newColumnDraft(\"id\", \"UInt64\"),\n        newColumnDraft(\"created_at\", \"DateTime\"),\n      ],\n    };\n  }\n\n  function openCreateDatabaseSheet() {\n    if (!canManageSchema) {\n      toastError(\"Admin role required\");\n      return;\n    }\n    closeMenu();\n    resetCreateDatabaseForm();\n    createDatabaseSheetOpen = true;\n    loadClusters();\n  }\n\n  function openCreateTableSheet(database = \"\") {\n    if (!canManageSchema) {\n      toastError(\"Admin role required\");\n      return;\n    }\n    closeMenu();\n    resetCreateTableForm();\n    createTableForm = { ...createTableForm, database };\n    createTableSheetOpen = true;\n    loadClusters();\n    loadDataTypes();\n  }\n\n  function newUploadColumnDraft(\n    name = \"\",\n    type = \"String\",\n    sample = \"\",\n  ): UploadColumnDraft {\n    uploadColumnSeed += 1;\n    return {\n      id: `upload-col-${uploadColumnSeed}`,\n      name,\n      type,\n      sample,\n    };\n  }\n\n  function resetUploadForm() {\n    uploadSourceFile = null;\n    uploadSourceFormat = \"\";\n    uploadRowsDetected = 0;\n    uploadPreviewRows = [];\n    uploadColumns = [];\n    uploadErrorMessage = \"\";\n    uploadCreateSQL = \"\";\n    uploadInsertSQL = \"\";\n    uploadTables = [];\n    uploadForm = {\n      database: \"\",\n      mode: \"new\",\n      tableName: \"\",\n      existingTable: \"\",\n      engine: \"MergeTree\",\n      onCluster: \"\",\n      ifNotExists: true,\n      orderBy: \"\",\n      partitionBy: \"\",\n      primaryKey: \"\",\n      comment: \"\",\n    };\n  }\n\n  async function loadUploadTables(database: string) {\n    const dbName = database.trim();\n    if (!dbName) {\n      uploadTables = [];\n      return;\n    }\n    uploadTablesLoading = true;\n    try {\n      const response = await apiGet<{ tables: Array<{ name: string; engine: string }> }>(\n        `/api/query/tables?database=${encodeURIComponent(dbName)}`,\n      );\n      uploadTables = (response.tables ?? 
[])\n        .map(t => t.name)\n        .sort((a, b) => a.localeCompare(b));\n    } catch {\n      uploadTables = [];\n    } finally {\n      uploadTablesLoading = false;\n    }\n  }\n\n  function openUploadSheet(database = \"\") {\n    if (!canManageSchema) {\n      toastError(\"Admin role required\");\n      return;\n    }\n    closeMenu();\n    resetUploadForm();\n    uploadSheetOpen = true;\n    uploadForm = { ...uploadForm, database: database.trim() };\n    if (database.trim()) {\n      loadUploadTables(database);\n    }\n    loadClusters();\n    loadDataTypes();\n  }\n\n  function onUploadDatabaseChange(value: string) {\n    uploadForm = {\n      ...uploadForm,\n      database: value,\n      existingTable: \"\",\n    };\n    loadUploadTables(value);\n  }\n\n  function updateUploadColumn(id: string, patch: Partial<UploadColumnDraft>) {\n    uploadColumns = uploadColumns.map((col) =>\n      col.id === id ? { ...col, ...patch } : col,\n    );\n  }\n\n  function onUploadFileSelected(event: Event) {\n    const input = event.currentTarget as HTMLInputElement;\n    const file = input.files?.[0] ?? null;\n    uploadSourceFile = file;\n    uploadRowsDetected = 0;\n    uploadSourceFormat = \"\";\n    uploadPreviewRows = [];\n    uploadColumns = [];\n    uploadErrorMessage = \"\";\n    uploadCreateSQL = \"\";\n    uploadInsertSQL = \"\";\n  }\n\n  async function discoverUploadSchema() {\n    if (!uploadSourceFile) {\n      toastError(\"Select a file first\");\n      return;\n    }\n\n    uploadDiscovering = true;\n    try {\n      const formData = new FormData();\n      formData.append(\"file\", uploadSourceFile);\n      const response = await apiPostForm<{\n        format: string;\n        rows: number;\n        columns: Array<{ name: string; type: string; sample?: string }>;\n        preview: Array<Record<string, unknown>>;\n      }>(\"/api/query/upload/discover\", formData);\n\n      uploadSourceFormat = response.format ?? 
\"\";\n      uploadRowsDetected = response.rows ?? 0;\n      uploadPreviewRows = response.preview ?? [];\n      uploadErrorMessage = \"\";\n      uploadColumns = (response.columns ?? []).map((col) =>\n        newUploadColumnDraft(\n          col.name ?? \"\",\n          col.type ?? \"String\",\n          col.sample ?? \"\",\n        ),\n      );\n      toastSuccess(\"Schema discovered\");\n    } catch (e: any) {\n      toastError(e.message);\n    } finally {\n      uploadDiscovering = false;\n    }\n  }\n\n  async function submitUpload() {\n    if (!uploadSourceFile) {\n      toastError(\"Select a file to upload\");\n      return;\n    }\n\n    const dbName = uploadForm.database.trim();\n    if (!dbName) {\n      toastError(\"Database is required\");\n      return;\n    }\n\n    const tableName = (\n      uploadForm.mode === \"new\"\n        ? uploadForm.tableName\n        : uploadForm.existingTable\n    ).trim();\n    if (!tableName) {\n      toastError(\n        uploadForm.mode === \"new\"\n          ? \"Table name is required\"\n          : \"Select a target table\",\n      );\n      return;\n    }\n\n    if (uploadColumns.length === 0) {\n      toastError(\"Run schema discovery before uploading\");\n      return;\n    }\n\n    const normalizedColumns = uploadColumns.map((col) => ({\n      name: col.name.trim(),\n      type: col.type.trim(),\n    }));\n    if (normalizedColumns.some((col) => !col.name || !col.type)) {\n      toastError(\"Every discovered column requires name and type\");\n      return;\n    }\n\n    uploadSubmitting = true;\n    try {\n      const formData = new FormData();\n      formData.append(\"file\", uploadSourceFile);\n      formData.append(\"database\", dbName);\n      formData.append(\"table\", tableName);\n      formData.append(\n        \"create_table\",\n        uploadForm.mode === \"new\" ? 
\"true\" : \"false\",\n      );\n      formData.append(\"columns\", JSON.stringify(normalizedColumns));\n      if (uploadForm.mode === \"new\") {\n        formData.append(\"engine\", uploadForm.engine);\n        formData.append(\"on_cluster\", uploadForm.onCluster || \"\");\n        formData.append(\n          \"if_not_exists\",\n          uploadForm.ifNotExists ? \"true\" : \"false\",\n        );\n        formData.append(\"order_by\", uploadForm.orderBy.trim());\n        formData.append(\"partition_by\", uploadForm.partitionBy.trim());\n        formData.append(\"primary_key\", uploadForm.primaryKey.trim());\n        formData.append(\"comment\", uploadForm.comment.trim());\n      }\n\n      const result = await apiPostForm<{\n        rows_inserted: number;\n        created_table: boolean;\n        commands?: {\n          create_table?: string;\n          insert?: string;\n        };\n      }>(\"/api/query/upload/ingest\", formData);\n\n      uploadErrorMessage = \"\";\n      uploadCreateSQL = result.commands?.create_table ?? \"\";\n      uploadInsertSQL = result.commands?.insert ?? \"\";\n      toastSuccess(\n        `Uploaded ${result.rows_inserted ?? 0} rows into ${dbName}.${tableName}`,\n      );\n      uploadSheetOpen = false;\n      await loadTables(dbName);\n      selectTable(dbName, tableName);\n    } catch (e: any) {\n      uploadErrorMessage = e.message ?? \"Upload failed\";\n      toastError(\"Upload failed. 
See details in the sheet.\");\n    } finally {\n      uploadSubmitting = false;\n    }\n  }\n\n  function openDeleteDatabaseSheet(database: string) {\n    if (!canManageSchema) {\n      toastError(\"Admin role required\");\n      return;\n    }\n    closeMenu();\n    deleteDatabaseForm = {\n      name: database,\n      onCluster: \"\",\n      sync: true,\n      typedName: \"\",\n    };\n    deleteDatabaseSheetOpen = true;\n    loadClusters();\n  }\n\n  function openDeleteTableSheet(database: string, table: string) {\n    if (!canManageSchema) {\n      toastError(\"Admin role required\");\n      return;\n    }\n    closeMenu();\n    deleteTableForm = {\n      database,\n      name: table,\n      onCluster: \"\",\n      sync: true,\n      typedName: \"\",\n    };\n    deleteTableSheetOpen = true;\n    loadClusters();\n  }\n\n  function addTableColumn() {\n    createTableForm = {\n      ...createTableForm,\n      columns: [...createTableForm.columns, newColumnDraft()],\n    };\n  }\n\n  function removeTableColumn(id: string) {\n    if (createTableForm.columns.length <= 1) return;\n    createTableForm = {\n      ...createTableForm,\n      columns: createTableForm.columns.filter((col) => col.id !== id),\n    };\n  }\n\n  function updateTableColumn(id: string, patch: Partial<TableColumnDraft>) {\n    createTableForm = {\n      ...createTableForm,\n      columns: createTableForm.columns.map((col) =>\n        col.id === id ? 
{ ...col, ...patch } : col,\n      ),\n    };\n  }\n\n  async function submitCreateDatabase() {\n    const name = createDatabaseForm.name.trim();\n    if (!name) {\n      toastError(\"Database name is required\");\n      return;\n    }\n\n    createDatabaseSubmitting = true;\n    try {\n      await apiPost(\"/api/query/schema/database\", {\n        name,\n        engine: createDatabaseForm.engine,\n        on_cluster: createDatabaseForm.onCluster || undefined,\n        if_not_exists: createDatabaseForm.ifNotExists,\n      });\n      toastSuccess(`Database \"${name}\" created`);\n      createDatabaseSheetOpen = false;\n      await loadDatabases();\n      await loadTables(name);\n      selectDatabase(name);\n    } catch (e: any) {\n      toastError(e.message);\n    } finally {\n      createDatabaseSubmitting = false;\n    }\n  }\n\n  async function submitCreateTable() {\n    const dbName = createTableForm.database.trim();\n    const tableName = createTableForm.name.trim();\n\n    if (!dbName) {\n      toastError(\"Database is required\");\n      return;\n    }\n    if (!tableName) {\n      toastError(\"Table name is required\");\n      return;\n    }\n\n    const normalizedColumns = createTableForm.columns.map((col) => ({\n      name: col.name.trim(),\n      type: col.type.trim(),\n      default_expression: col.defaultExpression.trim(),\n      comment: col.comment.trim(),\n    }));\n\n    if (normalizedColumns.some((col) => !col.name || !col.type)) {\n      toastError(\"Each column requires name and type\");\n      return;\n    }\n\n    createTableSubmitting = true;\n    createTableErrorMessage = \"\";\n    try {\n      const result = await apiPost<{ command?: string }>(\n        \"/api/query/schema/table\",\n        {\n          database: dbName,\n          name: tableName,\n          engine: createTableForm.engine,\n          on_cluster: createTableForm.onCluster || undefined,\n          if_not_exists: createTableForm.ifNotExists,\n          columns: 
normalizedColumns,\n          order_by: createTableForm.orderBy.trim(),\n          partition_by: createTableForm.partitionBy.trim(),\n          primary_key: createTableForm.primaryKey.trim(),\n          sample_by: createTableForm.sampleBy.trim(),\n          ttl: createTableForm.ttl.trim(),\n          settings: createTableForm.settings.trim(),\n          comment: createTableForm.comment.trim(),\n        },\n      );\n      const command = result?.command?.trim();\n      toastSuccess(`Table \"${dbName}.${tableName}\" created`);\n      if (command) {\n        toastSuccess(\"Command available in audit/details\");\n      }\n      createTableSheetOpen = false;\n      await loadTables(dbName);\n      selectTable(dbName, tableName);\n    } catch (e: any) {\n      createTableErrorMessage = e.message ?? \"Create table failed\";\n      toastError(\"Create table failed. See details in the sheet.\");\n    } finally {\n      createTableSubmitting = false;\n    }\n  }\n\n  function escapeIdentifierForPreview(input: string): string {\n    return `\\`${input.replace(/`/g, \"``\")}\\``;\n  }\n\n  function escapeLiteralForPreview(input: string): string {\n    return input.replace(/\\\\/g, \"\\\\\\\\\").replace(/'/g, \"\\\\'\");\n  }\n\n  function buildCreateTableCommandPreview(): string {\n    const dbName = createTableForm.database.trim();\n    const tableName = createTableForm.name.trim();\n    const target =\n      dbName && tableName\n        ? 
`${escapeIdentifierForPreview(dbName)}.${escapeIdentifierForPreview(tableName)}`\n        : \"`database`.`table`\";\n\n    const parts: string[] = [\"CREATE TABLE\"];\n    if (createTableForm.ifNotExists) parts.push(\"IF NOT EXISTS\");\n    parts.push(target);\n\n    if (createTableForm.onCluster.trim()) {\n      parts.push(\n        `ON CLUSTER ${escapeIdentifierForPreview(createTableForm.onCluster.trim())}`,\n      );\n    }\n\n    const normalizedColumns = createTableForm.columns\n      .map((col) => ({\n        name: col.name.trim(),\n        type: col.type.trim(),\n        defaultExpression: col.defaultExpression.trim(),\n        comment: col.comment.trim(),\n      }))\n      .filter((col) => col.name && col.type);\n\n    const columnLines = normalizedColumns.map((col) => {\n      let line = `${escapeIdentifierForPreview(col.name)} ${col.type}`;\n      if (col.defaultExpression) line += ` DEFAULT ${col.defaultExpression}`;\n      if (col.comment)\n        line += ` COMMENT '${escapeLiteralForPreview(col.comment)}'`;\n      return line;\n    });\n\n    const lines: string[] = [];\n    lines.push(`${parts.join(\" \")} (`);\n    lines.push(`  ${columnLines.join(\",\\n  \")}`);\n    lines.push(\")\");\n    lines.push(`ENGINE = ${createTableForm.engine.trim() || \"MergeTree\"}`);\n\n    if (createTableForm.partitionBy.trim())\n      lines.push(`PARTITION BY ${createTableForm.partitionBy.trim()}`);\n    if (createTableForm.orderBy.trim())\n      lines.push(`ORDER BY ${createTableForm.orderBy.trim()}`);\n    if (createTableForm.primaryKey.trim())\n      lines.push(`PRIMARY KEY ${createTableForm.primaryKey.trim()}`);\n    if (createTableForm.sampleBy.trim())\n      lines.push(`SAMPLE BY ${createTableForm.sampleBy.trim()}`);\n    if (createTableForm.ttl.trim())\n      lines.push(`TTL ${createTableForm.ttl.trim()}`);\n    if (createTableForm.settings.trim())\n      lines.push(`SETTINGS ${createTableForm.settings.trim()}`);\n    if (createTableForm.comment.trim())\n      
lines.push(\n        `COMMENT '${escapeLiteralForPreview(createTableForm.comment.trim())}'`,\n      );\n\n    return lines.join(\"\\n\");\n  }\n\n  async function submitDeleteDatabase() {\n    if (deleteDatabaseForm.typedName.trim() !== deleteDatabaseForm.name) {\n      toastError(\"Type the exact database name to confirm deletion\");\n      return;\n    }\n\n    deleteDatabaseSubmitting = true;\n    try {\n      await apiPost(\"/api/query/schema/database/drop\", {\n        name: deleteDatabaseForm.name,\n        on_cluster: deleteDatabaseForm.onCluster || undefined,\n        if_exists: true,\n        sync: deleteDatabaseForm.sync,\n      });\n      toastSuccess(`Database \"${deleteDatabaseForm.name}\" deleted`);\n      deleteDatabaseSheetOpen = false;\n      await loadDatabases();\n    } catch (e: any) {\n      toastError(e.message);\n    } finally {\n      deleteDatabaseSubmitting = false;\n    }\n  }\n\n  async function submitDeleteTable() {\n    const qualified = `${deleteTableForm.database}.${deleteTableForm.name}`;\n    if (deleteTableForm.typedName.trim() !== qualified) {\n      toastError(`Type \"${qualified}\" to confirm deletion`);\n      return;\n    }\n\n    deleteTableSubmitting = true;\n    try {\n      await apiPost(\"/api/query/schema/table/drop\", {\n        database: deleteTableForm.database,\n        name: deleteTableForm.name,\n        on_cluster: deleteTableForm.onCluster || undefined,\n        if_exists: true,\n        sync: deleteTableForm.sync,\n      });\n      toastSuccess(`Table \"${qualified}\" deleted`);\n      deleteTableSheetOpen = false;\n      await loadTables(deleteTableForm.database);\n    } catch (e: any) {\n      toastError(e.message);\n    } finally {\n      deleteTableSubmitting = false;\n    }\n  }\n\n  function getExplorerMenuItems(): ContextMenuItem[] {\n    if (!menu) return [];\n    const target = menu.target;\n\n    if (target.kind === \"root\") {\n      return [\n        {\n          id: \"root-create-db\",\n          
label: \"Create Database...\",\n          icon: FolderPlus,\n          disabled: !canManageSchema,\n          onSelect: () => openCreateDatabaseSheet(),\n        },\n        {\n          id: \"root-create-table\",\n          label: \"Create Table...\",\n          icon: TableProperties,\n          disabled: databases.length === 0 || !canManageSchema,\n          onSelect: () => openCreateTableSheet(),\n        },\n        {\n          id: \"root-upload\",\n          label: \"Upload Data...\",\n          icon: Upload,\n          disabled: databases.length === 0 || !canManageSchema,\n          onSelect: () => openUploadSheet(),\n        },\n        { id: \"sep\", separator: true },\n        {\n          id: \"root-toggle-system\",\n          label: showSystemDbs\n            ? \"Hide System Databases\"\n            : \"Show System Databases\",\n          icon: Database,\n          onSelect: () => {\n            showSystemDbs = !showSystemDbs;\n            closeMenu();\n          },\n        },\n        { id: \"sep2\", separator: true },\n        {\n          id: \"root-refresh\",\n          label: \"Refresh Schema\",\n          icon: RefreshCw,\n          onSelect: async () => {\n            closeMenu();\n            await loadDatabases();\n          },\n        },\n      ];\n    }\n\n    if (target.kind === \"database\") {\n      const isSystem = isSystemDatabase(target.database);\n      return [\n        {\n          id: \"db-info\",\n          label: \"View Database Info\",\n          icon: Info,\n          shortcut: \"Enter\",\n          onSelect: () => viewDatabaseInfo(target.database),\n        },\n        {\n          id: \"db-query\",\n          label: \"Run Show Tables\",\n          icon: Play,\n          shortcut: \"Cmd/Ctrl+Enter\",\n          onSelect: () => queryDatabase(target.database),\n        },\n        {\n          id: \"db-refresh\",\n          label: \"Refresh Tables\",\n          icon: RefreshCw,\n          shortcut: \"R\",\n          onSelect: 
() => refreshDatabase(target.database),\n        },\n        { id: \"sep1\", separator: true },\n        {\n          id: \"db-create-table\",\n          label: \"Create Table...\",\n          icon: TableProperties,\n          disabled: isSystem || !canManageSchema,\n          onSelect: () => openCreateTableSheet(target.database),\n        },\n        {\n          id: \"db-upload\",\n          label: \"Upload Data...\",\n          icon: Upload,\n          disabled: isSystem || !canManageSchema,\n          onSelect: () => openUploadSheet(target.database),\n        },\n        {\n          id: \"db-delete\",\n          label: \"Delete Database...\",\n          icon: Trash2,\n          danger: true,\n          disabled: isSystem || !canManageSchema,\n          onSelect: () => openDeleteDatabaseSheet(target.database),\n        },\n        { id: \"sep2\", separator: true },\n        {\n          id: \"db-copy\",\n          label: \"Copy Database Name\",\n          icon: Copy,\n          onSelect: () => copyToClipboard(target.database),\n        },\n      ];\n    }\n\n    return [\n      {\n        id: \"table-info\",\n        label: \"View Table Info\",\n        icon: Info,\n        shortcut: \"Enter\",\n        onSelect: () => viewTableInfo(target.database, target.table),\n      },\n      {\n        id: \"table-query\",\n        label: \"Query Table\",\n        icon: Play,\n        shortcut: \"Cmd/Ctrl+Enter\",\n        onSelect: () => queryTable(target.database, target.table),\n      },\n      { id: \"sep1\", separator: true },\n      {\n        id: \"table-delete\",\n        label: \"Delete Table...\",\n        icon: Trash2,\n        danger: true,\n        disabled: isSystemDatabase(target.database) || !canManageSchema,\n        onSelect: () => openDeleteTableSheet(target.database, target.table),\n      },\n      { id: \"sep2\", separator: true },\n      {\n        id: \"table-copy\",\n        label: \"Copy Qualified Name\",\n        icon: Copy,\n        onSelect: () 
=>\n          copyToClipboard(`\\`${target.database}\\`.\\`${target.table}\\``),\n      },\n    ];\n  }\n</script>\n\n<svelte:window onkeydown={(e) => e.key === \"Escape\" && closeMenu()} />\n\n<!-- svelte-ignore a11y_no_static_element_interactions -->\n<div\n  class=\"flex flex-col h-full text-[13px]\"\n  oncontextmenu={(e) => openContextMenu(e, { kind: \"root\" })}\n>\n  <div class=\"px-2.5 py-2 border-b border-gray-200 dark:border-gray-800\">\n    <div class=\"flex items-center gap-1.5\">\n      <div\n        class=\"flex items-center gap-1.5 px-2.5 py-1.5 bg-gray-200/60 dark:bg-gray-800/60 rounded-md border border-gray-300/50 dark:border-gray-700/50 focus-within:border-gray-400 dark:focus-within:border-gray-600 flex-1\"\n      >\n        <Search size={13} class=\"text-gray-500 shrink-0\" />\n        <input\n          type=\"text\"\n          placeholder=\"Filter databases and tables...\"\n          class=\"flex-1 bg-transparent text-[13px] text-gray-700 dark:text-gray-300 placeholder-gray-400 dark:placeholder-gray-600 outline-none\"\n          bind:value={searchTerm}\n        />\n        {#if searchTerm}\n          <button\n            class=\"text-gray-500 hover:text-gray-700 dark:hover:text-gray-300\"\n            onclick={() => (searchTerm = \"\")}\n          >\n            <X size={13} />\n          </button>\n        {/if}\n      </div>\n\n      <button\n        class=\"ds-btn-outline h-[34px] px-2 shrink-0\"\n        onclick={openSchemaActionsMenu}\n        title=\"Schema actions\"\n      >\n        <MoreHorizontal size={14} />\n      </button>\n      <button\n        class=\"ds-btn-outline h-[34px] px-2 shrink-0\"\n        onclick={refreshSchema}\n        title=\"Refresh schema\"\n        disabled={loading}\n      >\n        <RefreshCw size={14} class={loading ? 
\"animate-spin\" : \"\"} />\n      </button>\n    </div>\n\n    {#if !canManageSchema}\n      <p class=\"mt-1 px-0.5 text-[10px] text-gray-500\">\n        Schema create/delete actions require admin role\n      </p>\n    {/if}\n  </div>\n\n  <div class=\"flex-1 overflow-auto\">\n    {#if loading && databases.length === 0}\n      <div class=\"flex items-center justify-center py-8\">\n        <Spinner size=\"sm\" />\n      </div>\n    {:else if filteredDatabases.length === 0}\n      <div\n        class=\"flex items-center justify-center py-8 text-xs text-gray-400 dark:text-gray-600\"\n      >\n        {searchTerm ? \"No matches\" : \"No databases\"}\n      </div>\n    {:else}\n      {#each filteredDatabases as db}\n        <div>\n          <div\n            class=\"group flex items-center gap-1.5 w-full px-2.5 py-1.5 text-left hover:bg-gray-200/50 dark:hover:bg-gray-800/50 text-gray-700 dark:text-gray-300\"\n            oncontextmenu={(e) =>\n              openContextMenu(e, { kind: \"database\", database: db.name })}\n          >\n            <button\n              class=\"shrink-0 p-0.5\"\n              onclick={() => toggleDatabase(db.name)}\n            >\n              {#if db.expanded}\n                <ChevronDown size={15} class=\"text-gray-500 shrink-0\" />\n              {:else}\n                <ChevronRight size={15} class=\"text-gray-500 shrink-0\" />\n              {/if}\n            </button>\n            <button\n              class=\"flex items-center gap-1.5 flex-1 min-w-0 text-left\"\n              onclick={() => toggleDatabase(db.name)}\n            >\n              <Database size={15} class=\"text-ch-blue shrink-0\" />\n              <span class=\"truncate\">{db.name}</span>\n              {#if db.loading}\n                <Spinner size=\"sm\" class=\"ml-auto\" />\n              {/if}\n            </button>\n            <button\n              class=\"shrink-0 p-0.5 rounded text-gray-400 dark:text-gray-600 hover:text-gray-700 
dark:hover:text-gray-300 opacity-0 group-hover:opacity-100 transition-opacity\"\n              onclick={(e) =>\n                openContextMenu(e, { kind: \"database\", database: db.name })}\n            >\n              <MoreHorizontal size={15} />\n            </button>\n          </div>\n\n          {#if db.expanded && db.tables}\n            {#each db.tables as table}\n              <div>\n                <div\n                  class=\"group flex items-center w-full pl-7 pr-1.5 py-1.5 text-gray-500 dark:text-gray-400 hover:bg-gray-200/50 dark:hover:bg-gray-800/50\"\n                  oncontextmenu={(e) =>\n                    openContextMenu(e, {\n                      kind: \"table\",\n                      database: db.name,\n                      table: table.name,\n                    })}\n                >\n                  <button\n                    class=\"shrink-0 p-0.5\"\n                    onclick={() => toggleTable(db.name, table.name)}\n                  >\n                    {#if table.expanded}\n                      <ChevronDown\n                        size={13}\n                        class=\"text-gray-400 dark:text-gray-600\"\n                      />\n                    {:else}\n                      <ChevronRight\n                        size={13}\n                        class=\"text-gray-400 dark:text-gray-600\"\n                      />\n                    {/if}\n                  </button>\n                  <button\n                    class=\"flex items-center gap-1.5 flex-1 min-w-0 text-left\"\n                    onclick={() => selectTable(db.name, table.name)}\n                  >\n                    {#if getTableType(table.engine) === \"view\"}\n                      <Eye size={14} class=\"text-blue-400 shrink-0\" />\n                    {:else if getTableType(table.engine) === \"materialized-view\"}\n                      <Layers size={14} class=\"text-purple-400 shrink-0\" />\n                    {:else}\n               
       <Table2 size={14} class=\"text-gray-500 shrink-0\" />\n                    {/if}\n                    <span class=\"truncate\">{table.name}</span>\n                    {#if getTableTypeLabel(table.engine)}\n                      <span class=\"ml-1 text-[9px] font-medium px-1 py-0.5 rounded bg-gray-200/60 dark:bg-gray-700/60 text-gray-500 dark:text-gray-400 shrink-0\"\n                        >{getTableTypeLabel(table.engine)}</span\n                      >\n                    {/if}\n                    {#if table.loading}\n                      <Spinner size=\"sm\" class=\"ml-auto\" />\n                    {/if}\n                  </button>\n\n                  <button\n                    class=\"shrink-0 p-0.5 rounded text-gray-400 dark:text-gray-600 hover:text-gray-700 dark:hover:text-gray-300 opacity-0 group-hover:opacity-100 transition-opacity\"\n                    onclick={(e) =>\n                      openContextMenu(e, {\n                        kind: \"table\",\n                        database: db.name,\n                        table: table.name,\n                      })}\n                  >\n                    <MoreHorizontal size={15} />\n                  </button>\n                </div>\n\n                {#if table.expanded && table.columns}\n                  {#each table.columns as col}\n                    <div\n                      class=\"flex items-center gap-1.5 pl-12 pr-2 py-0.5 text-[11px] text-gray-500\"\n                    >\n                      <Columns3 size={12} class=\"shrink-0\" />\n                      <span class=\"truncate\">{col.name}</span>\n                      <span\n                        class=\"ml-auto text-gray-400 dark:text-gray-600 truncate\"\n                        >{col.type}</span\n                      >\n                    </div>\n                  {/each}\n                {/if}\n              </div>\n            {/each}\n          {/if}\n        </div>\n      {/each}\n    {/if}\n  
</div>\n</div>\n\n<ContextMenu\n  open={!!menu}\n  x={menu?.x ?? 0}\n  y={menu?.y ?? 0}\n  items={getExplorerMenuItems()}\n  onclose={closeMenu}\n/>\n\n<Sheet\n  open={createDatabaseSheetOpen}\n  title=\"Create Database\"\n  size=\"md\"\n  onclose={() => (createDatabaseSheetOpen = false)}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault();\n      submitCreateDatabase();\n    }}\n  >\n    <div class=\"grid gap-4 md:grid-cols-2\">\n      <div>\n        <div class=\"ds-form-label\">Database Name</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"analytics\"\n          bind:value={createDatabaseForm.name}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">Database Engine</div>\n        <Combobox\n          options={databaseEngineOptions}\n          value={createDatabaseForm.engine}\n          onChange={(value) =>\n            (createDatabaseForm = { ...createDatabaseForm, engine: value })}\n          placeholder=\"Select engine\"\n        />\n      </div>\n    </div>\n\n    <div class=\"grid gap-4 md:grid-cols-2\">\n      <div>\n        <div class=\"ds-form-label\">Cluster (optional)</div>\n        <Combobox\n          options={clusterOptions}\n          value={createDatabaseForm.onCluster}\n          onChange={(value) =>\n            (createDatabaseForm = { ...createDatabaseForm, onCluster: value })}\n          placeholder={clustersLoading ? \"Loading clusters...\" : \"No cluster\"}\n          disabled={clustersLoading}\n        />\n      </div>\n      <div class=\"flex items-end pb-2\">\n        <label class=\"ds-checkbox-label\">\n          <input\n            type=\"checkbox\"\n            class=\"ds-checkbox\"\n            bind:checked={createDatabaseForm.ifNotExists}\n          />\n          IF NOT EXISTS\n        </label>\n      </div>\n    </div>\n\n    <div class=\"ds-panel-muted p-3 text-xs text-gray-600 dark:text-gray-300\">\n      Creates a database with selected engine. 
If cluster is set, operation runs\n      with <code>ON CLUSTER</code>.\n    </div>\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <button\n        type=\"button\"\n        class=\"ds-btn-outline\"\n        onclick={() => (createDatabaseSheetOpen = false)}>Cancel</button\n      >\n      <button\n        type=\"submit\"\n        class=\"ds-btn-primary\"\n        disabled={createDatabaseSubmitting}\n      >\n        <FolderPlus size={14} />\n        {createDatabaseSubmitting ? \"Creating...\" : \"Create Database\"}\n      </button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={createTableSheetOpen}\n  title=\"Create Table\"\n  size=\"xl\"\n  onclose={() => (createTableSheetOpen = false)}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault();\n      submitCreateTable();\n    }}\n  >\n    <div class=\"grid gap-4 md:grid-cols-2 lg:grid-cols-4\">\n      <div>\n        <div class=\"ds-form-label\">Database</div>\n        <Combobox\n          options={databaseOptions}\n          value={createTableForm.database}\n          onChange={(value) =>\n            (createTableForm = { ...createTableForm, database: value })}\n          placeholder=\"Select database\"\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">Table Name</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"events\"\n          bind:value={createTableForm.name}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">Engine</div>\n        <Combobox\n          options={tableEngineOptions}\n          value={createTableForm.engine}\n          onChange={(value) =>\n            (createTableForm = { ...createTableForm, engine: value })}\n          placeholder=\"Select engine\"\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">Cluster (optional)</div>\n        <Combobox\n          options={clusterOptions}\n          value={createTableForm.onCluster}\n          
onChange={(value) =>\n            (createTableForm = { ...createTableForm, onCluster: value })}\n          placeholder={clustersLoading ? \"Loading clusters...\" : \"No cluster\"}\n          disabled={clustersLoading}\n        />\n      </div>\n    </div>\n\n    <div class=\"ds-panel p-3 space-y-3\">\n      <div class=\"flex items-center justify-between gap-2\">\n        <div>\n          <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">\n            Columns\n          </h3>\n          <p class=\"text-[11px] text-gray-500\">\n            Type is selected from ClickHouse data types (parametric families are\n            prefilled with valid templates).\n          </p>\n        </div>\n        <button type=\"button\" class=\"ds-btn-outline\" onclick={addTableColumn}>\n          <Plus size={13} />\n          Add Column\n        </button>\n      </div>\n      <div class=\"overflow-x-auto overflow-y-visible pb-1\">\n        <table class=\"ds-table min-w-[760px]\">\n          <thead>\n            <tr class=\"ds-table-head-row\">\n              <th class=\"ds-table-th\">Name</th>\n              <th class=\"ds-table-th\">Type</th>\n              <th class=\"ds-table-th\">Default Expression</th>\n              <th class=\"ds-table-th\">Comment</th>\n              <th class=\"ds-table-th-right\">Actions</th>\n            </tr>\n          </thead>\n          <tbody>\n            {#each createTableForm.columns as col}\n              <tr class=\"ds-table-row-static\">\n                <td class=\"py-2 px-3 align-top\">\n                  <input\n                    class=\"ds-input-sm\"\n                    value={col.name}\n                    oninput={(e) =>\n                      updateTableColumn(col.id, {\n                        name: (e.currentTarget as HTMLInputElement).value,\n                      })}\n                    placeholder=\"column_name\"\n                  />\n                </td>\n                <td class=\"py-2 px-3 align-top\">\n     
             <Combobox\n                    options={dataTypeOptionsFor(col.type)}\n                    value={col.type}\n                    onChange={(value) =>\n                      updateTableColumn(col.id, { type: value })}\n                    placeholder={dataTypesLoading\n                      ? \"Loading types...\"\n                      : \"Select type\"}\n                    disabled={dataTypesLoading}\n                  />\n                </td>\n                <td class=\"py-2 px-3 align-top\">\n                  <input\n                    class=\"ds-input-sm\"\n                    value={col.defaultExpression}\n                    oninput={(e) =>\n                      updateTableColumn(col.id, {\n                        defaultExpression: (e.currentTarget as HTMLInputElement)\n                          .value,\n                      })}\n                    placeholder=\"now()\"\n                  />\n                </td>\n                <td class=\"py-2 px-3 align-top\">\n                  <input\n                    class=\"ds-input-sm\"\n                    value={col.comment}\n                    oninput={(e) =>\n                      updateTableColumn(col.id, {\n                        comment: (e.currentTarget as HTMLInputElement).value,\n                      })}\n                    placeholder=\"Business meaning\"\n                  />\n                </td>\n                <td class=\"py-2 px-3 align-top text-right\">\n                  <button\n                    type=\"button\"\n                    class=\"ds-btn-ghost text-red-500 hover:text-red-600\"\n                    onclick={() => removeTableColumn(col.id)}\n                    disabled={createTableForm.columns.length <= 1}\n                  >\n                    <Trash2 size={13} />\n                  </button>\n                </td>\n              </tr>\n            {/each}\n          </tbody>\n        </table>\n      </div>\n    </div>\n\n    <div class=\"grid gap-4 
md:grid-cols-2 lg:grid-cols-3\">\n      <div>\n        <div class=\"ds-form-label\">ORDER BY</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"tuple()\"\n          bind:value={createTableForm.orderBy}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">PARTITION BY</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"toYYYYMM(created_at)\"\n          bind:value={createTableForm.partitionBy}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">PRIMARY KEY</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"id\"\n          bind:value={createTableForm.primaryKey}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">SAMPLE BY</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"cityHash64(id)\"\n          bind:value={createTableForm.sampleBy}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">TTL</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"created_at + INTERVAL 90 DAY\"\n          bind:value={createTableForm.ttl}\n        />\n      </div>\n      <div>\n        <div class=\"ds-form-label\">SETTINGS</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"index_granularity = 8192\"\n          bind:value={createTableForm.settings}\n        />\n      </div>\n    </div>\n\n    <div class=\"grid gap-4 md:grid-cols-2\">\n      <div>\n        <div class=\"ds-form-label\">Table Comment (optional)</div>\n        <input\n          class=\"ds-input\"\n          placeholder=\"Fact table for product analytics\"\n          bind:value={createTableForm.comment}\n        />\n      </div>\n      <div class=\"flex items-end pb-2\">\n        <label class=\"ds-checkbox-label\">\n          <input\n            type=\"checkbox\"\n            class=\"ds-checkbox\"\n            bind:checked={createTableForm.ifNotExists}\n          />\n          IF 
NOT EXISTS\n        </label>\n      </div>\n    </div>\n\n    <div class=\"ds-panel-muted p-3 space-y-1\">\n      <div class=\"text-xs font-semibold text-gray-700 dark:text-gray-200\">\n        Command Preview\n      </div>\n      <pre\n        class=\"text-[11px] max-h-36 overflow-auto whitespace-pre-wrap break-all text-gray-600 dark:text-gray-300\">{buildCreateTableCommandPreview()}</pre>\n    </div>\n\n    {#if createTableErrorMessage}\n      <div class=\"rounded-lg border border-red-500/40 bg-red-500/10 p-3\">\n        <div class=\"text-xs font-semibold text-red-200 mb-1\">\n          Create Table Error\n        </div>\n        <pre\n          class=\"text-[11px] whitespace-pre-wrap break-words max-h-36 overflow-auto text-red-100\">{createTableErrorMessage}</pre>\n      </div>\n    {/if}\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <button\n        type=\"button\"\n        class=\"ds-btn-outline\"\n        onclick={() => (createTableSheetOpen = false)}>Cancel</button\n      >\n      <button\n        type=\"submit\"\n        class=\"ds-btn-primary\"\n        disabled={createTableSubmitting}\n      >\n        <TableProperties size={14} />\n        {createTableSubmitting ? 
\"Creating...\" : \"Create Table\"}\n      </button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={deleteDatabaseSheetOpen}\n  title=\"Delete Database\"\n  size=\"md\"\n  onclose={() => (deleteDatabaseSheetOpen = false)}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault();\n      submitDeleteDatabase();\n    }}\n  >\n    <div\n      class=\"rounded-lg border border-red-500/40 bg-red-500/10 p-3 text-sm text-red-200 flex items-start gap-2\"\n    >\n      <AlertTriangle size={16} class=\"mt-0.5 text-red-400\" />\n      <div>\n        This will permanently delete <strong>{deleteDatabaseForm.name}</strong> and\n        all tables inside it.\n      </div>\n    </div>\n\n    <div class=\"grid gap-4 md:grid-cols-2\">\n      <div>\n        <div class=\"ds-form-label\">Cluster (optional)</div>\n        <Combobox\n          options={clusterOptions}\n          value={deleteDatabaseForm.onCluster}\n          onChange={(value) =>\n            (deleteDatabaseForm = { ...deleteDatabaseForm, onCluster: value })}\n          placeholder={clustersLoading ? 
\"Loading clusters...\" : \"No cluster\"}\n          disabled={clustersLoading}\n        />\n      </div>\n      <div class=\"flex items-end pb-2\">\n        <label class=\"ds-checkbox-label\">\n          <input\n            type=\"checkbox\"\n            class=\"ds-checkbox\"\n            bind:checked={deleteDatabaseForm.sync}\n          />\n          Use SYNC drop\n        </label>\n      </div>\n    </div>\n\n    <div>\n      <div class=\"ds-form-label\">Type database name to confirm</div>\n      <input\n        class=\"ds-input\"\n        bind:value={deleteDatabaseForm.typedName}\n        placeholder={deleteDatabaseForm.name}\n      />\n    </div>\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <button\n        type=\"button\"\n        class=\"ds-btn-outline\"\n        onclick={() => (deleteDatabaseSheetOpen = false)}>Cancel</button\n      >\n      <button\n        type=\"submit\"\n        class=\"inline-flex items-center justify-center gap-1.5 rounded px-3 py-1.5 text-[13px] font-medium text-white bg-red-600 border border-red-500 transition-colors hover:bg-red-700 disabled:opacity-60 disabled:cursor-not-allowed\"\n        disabled={deleteDatabaseSubmitting ||\n          deleteDatabaseForm.typedName.trim() !== deleteDatabaseForm.name}\n      >\n        <Trash2 size={14} />\n        {deleteDatabaseSubmitting ? 
\"Deleting...\" : \"Delete Database\"}\n      </button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={uploadSheetOpen}\n  title=\"Upload Data\"\n  size=\"xl\"\n  onclose={() => (uploadSheetOpen = false)}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault();\n      submitUpload();\n    }}\n  >\n    <div class=\"ds-panel p-3 space-y-3\">\n      <div class=\"flex items-start justify-between gap-3\">\n        <div>\n          <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">\n            Source File\n          </h3>\n          <p class=\"text-[11px] text-gray-500\">\n            Accepted formats: CSV, Parquet, JSON, JSONL.\n          </p>\n        </div>\n        <button\n          type=\"button\"\n          class=\"ds-btn-outline\"\n          onclick={discoverUploadSchema}\n          disabled={!uploadSourceFile || uploadDiscovering}\n        >\n          <Upload size={13} />\n          {uploadDiscovering ? \"Discovering...\" : \"Discover Schema\"}\n        </button>\n      </div>\n\n      <input\n        type=\"file\"\n        class=\"ds-input\"\n        accept={uploadAccept}\n        onchange={onUploadFileSelected}\n      />\n\n      <div class=\"grid gap-3 md:grid-cols-3\">\n        <div class=\"ds-panel-muted p-2.5\">\n          <div class=\"text-[10px] uppercase tracking-wide text-gray-500\">\n            Rows detected\n          </div>\n          <div\n            class=\"mt-0.5 text-sm font-semibold text-gray-800 dark:text-gray-200\"\n          >\n            {uploadRowsDetected || \"—\"}\n          </div>\n        </div>\n        <div class=\"ds-panel-muted p-2.5\">\n          <div class=\"text-[10px] uppercase tracking-wide text-gray-500\">\n            Format\n          </div>\n          <div\n            class=\"mt-0.5 text-sm font-semibold text-gray-800 dark:text-gray-200\"\n          >\n            {uploadSourceFormat || \"—\"}\n          </div>\n        </div>\n        <div class=\"ds-panel-muted 
p-2.5\">\n          <div class=\"text-[10px] uppercase tracking-wide text-gray-500\">\n            Columns\n          </div>\n          <div\n            class=\"mt-0.5 text-sm font-semibold text-gray-800 dark:text-gray-200\"\n          >\n            {uploadColumns.length || \"—\"}\n          </div>\n        </div>\n      </div>\n    </div>\n\n    <div class=\"ds-panel p-3 space-y-3\">\n      <div class=\"flex flex-wrap items-center justify-between gap-2\">\n        <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">\n          Target\n        </h3>\n        <div\n          class=\"inline-flex rounded-lg border border-gray-300 dark:border-gray-700 overflow-hidden\"\n        >\n          <button\n            type=\"button\"\n            class=\"px-3 py-1.5 text-xs {uploadForm.mode === 'new'\n              ? 'bg-ch-blue/20 text-ch-blue'\n              : 'text-gray-500 hover:bg-gray-200/55 dark:hover:bg-gray-800/55'}\"\n            onclick={() => (uploadForm = { ...uploadForm, mode: \"new\" })}\n          >\n            Create New Table\n          </button>\n          <button\n            type=\"button\"\n            class=\"px-3 py-1.5 text-xs border-l border-gray-300 dark:border-gray-700 {uploadForm.mode ===\n            'existing'\n              ? 
'bg-ch-blue/20 text-ch-blue'\n              : 'text-gray-500 hover:bg-gray-200/55 dark:hover:bg-gray-800/55'}\"\n            onclick={() => (uploadForm = { ...uploadForm, mode: \"existing\" })}\n          >\n            Existing Table\n          </button>\n        </div>\n      </div>\n\n      <div class=\"grid gap-4 md:grid-cols-2 lg:grid-cols-4\">\n        <div>\n          <div class=\"ds-form-label\">Database</div>\n          <Combobox\n            options={databaseOptions}\n            value={uploadForm.database}\n            onChange={onUploadDatabaseChange}\n            placeholder=\"Select database\"\n          />\n        </div>\n\n        {#if uploadForm.mode === \"new\"}\n          <div>\n            <div class=\"ds-form-label\">Table Name</div>\n            <input\n              class=\"ds-input\"\n              bind:value={uploadForm.tableName}\n              placeholder=\"events_upload\"\n            />\n          </div>\n          <div>\n            <div class=\"ds-form-label\">Engine</div>\n            <Combobox\n              options={tableEngineOptions}\n              value={uploadForm.engine}\n              onChange={(value) =>\n                (uploadForm = { ...uploadForm, engine: value })}\n              placeholder=\"Select engine\"\n            />\n          </div>\n          <div>\n            <div class=\"ds-form-label\">Cluster (optional)</div>\n            <Combobox\n              options={clusterOptions}\n              value={uploadForm.onCluster}\n              onChange={(value) =>\n                (uploadForm = { ...uploadForm, onCluster: value })}\n              placeholder={clustersLoading\n                ? 
\"Loading clusters...\"\n                : \"No cluster\"}\n              disabled={clustersLoading}\n            />\n          </div>\n        {:else}\n          <div class=\"md:col-span-3\">\n            <div class=\"ds-form-label\">Target Table</div>\n            <Combobox\n              options={uploadTableOptions}\n              value={uploadForm.existingTable}\n              onChange={(value) =>\n                (uploadForm = { ...uploadForm, existingTable: value })}\n              placeholder={uploadTablesLoading\n                ? \"Loading tables...\"\n                : \"Select table\"}\n              disabled={uploadTablesLoading || !uploadForm.database}\n            />\n          </div>\n        {/if}\n      </div>\n\n      {#if uploadForm.mode === \"new\"}\n        <div class=\"grid gap-4 md:grid-cols-2 lg:grid-cols-4\">\n          <div>\n            <div class=\"ds-form-label\">ORDER BY</div>\n            <input\n              class=\"ds-input\"\n              bind:value={uploadForm.orderBy}\n              placeholder=\"tuple()\"\n            />\n          </div>\n          <div>\n            <div class=\"ds-form-label\">PARTITION BY</div>\n            <input\n              class=\"ds-input\"\n              bind:value={uploadForm.partitionBy}\n              placeholder=\"toYYYYMM(created_at)\"\n            />\n          </div>\n          <div>\n            <div class=\"ds-form-label\">PRIMARY KEY</div>\n            <input\n              class=\"ds-input\"\n              bind:value={uploadForm.primaryKey}\n              placeholder=\"id\"\n            />\n          </div>\n          <div class=\"flex items-end pb-2\">\n            <label class=\"ds-checkbox-label\">\n              <input\n                type=\"checkbox\"\n                class=\"ds-checkbox\"\n                bind:checked={uploadForm.ifNotExists}\n              />\n              IF NOT EXISTS\n            </label>\n          </div>\n          <div class=\"md:col-span-2 
lg:col-span-4\">\n            <div class=\"ds-form-label\">Table Comment (optional)</div>\n            <input\n              class=\"ds-input\"\n              bind:value={uploadForm.comment}\n              placeholder=\"Uploaded dataset table\"\n            />\n          </div>\n        </div>\n      {/if}\n    </div>\n\n    <div class=\"ds-panel p-3 space-y-3\">\n      <div class=\"flex items-center justify-between gap-2\">\n        <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">\n          Discovered Columns\n        </h3>\n        <span class=\"text-[11px] text-gray-500\"\n          >Edit inferred types before upload if needed.</span\n        >\n      </div>\n\n      {#if uploadColumns.length === 0}\n        <div class=\"text-xs text-gray-500\">\n          Run \"Discover Schema\" to populate columns.\n        </div>\n      {:else}\n        <div class=\"overflow-x-auto overflow-y-visible pb-1\">\n          <table class=\"ds-table min-w-[720px]\">\n            <thead>\n              <tr class=\"ds-table-head-row\">\n                <th class=\"ds-table-th\">Name</th>\n                <th class=\"ds-table-th\">Type</th>\n                <th class=\"ds-table-th\">Sample</th>\n              </tr>\n            </thead>\n            <tbody>\n              {#each uploadColumns as col}\n                <tr class=\"ds-table-row-static\">\n                  <td class=\"py-2 px-3 align-top\">\n                    <input\n                      class=\"ds-input-sm\"\n                      value={col.name}\n                      oninput={(e) =>\n                        updateUploadColumn(col.id, {\n                          name: (e.currentTarget as HTMLInputElement).value,\n                        })}\n                    />\n                  </td>\n                  <td class=\"py-2 px-3 align-top\">\n                    <Combobox\n                      options={dataTypeOptionsFor(col.type)}\n                      value={col.type}\n                     
 onChange={(value) =>\n                        updateUploadColumn(col.id, { type: value })}\n                      placeholder={dataTypesLoading\n                        ? \"Loading types...\"\n                        : \"Select type\"}\n                      disabled={dataTypesLoading}\n                    />\n                  </td>\n                  <td\n                    class=\"py-2 px-3 align-top text-xs text-gray-500 dark:text-gray-400\"\n                  >\n                    <span class=\"line-clamp-2 break-all\"\n                      >{col.sample || \"—\"}</span\n                    >\n                  </td>\n                </tr>\n              {/each}\n            </tbody>\n          </table>\n        </div>\n      {/if}\n    </div>\n\n    <div class=\"ds-panel-muted p-3\">\n      <div class=\"text-xs font-medium text-gray-700 dark:text-gray-200 mb-1\">\n        Preview\n      </div>\n      {#if uploadPreviewRows.length === 0}\n        <div class=\"text-xs text-gray-500\">No preview rows yet.</div>\n      {:else}\n        <pre\n          class=\"text-[11px] max-h-40 overflow-auto whitespace-pre-wrap break-all text-gray-600 dark:text-gray-300\">{JSON.stringify(\n            uploadPreviewRows.slice(0, 5),\n            null,\n            2,\n          )}</pre>\n      {/if}\n    </div>\n\n    {#if uploadErrorMessage}\n      <div class=\"rounded-lg border border-red-500/40 bg-red-500/10 p-3\">\n        <div class=\"text-xs font-semibold text-red-200 mb-1\">Upload Error</div>\n        <pre\n          class=\"text-[11px] whitespace-pre-wrap break-words max-h-36 overflow-auto text-red-100\">{uploadErrorMessage}</pre>\n      </div>\n    {/if}\n\n    {#if uploadCreateSQL || uploadInsertSQL}\n      <div class=\"ds-panel p-3 space-y-2\">\n        <div class=\"text-xs font-semibold text-gray-800 dark:text-gray-200\">\n          Executed Commands\n        </div>\n        {#if uploadCreateSQL}\n          <div>\n            <div class=\"text-[11px] text-gray-500 
mb-1\">CREATE TABLE</div>\n            <pre\n              class=\"text-[11px] max-h-32 overflow-auto whitespace-pre-wrap break-all text-gray-600 dark:text-gray-300\">{uploadCreateSQL}</pre>\n          </div>\n        {/if}\n        {#if uploadInsertSQL}\n          <div>\n            <div class=\"text-[11px] text-gray-500 mb-1\">\n              INSERT (sample/batch)\n            </div>\n            <pre\n              class=\"text-[11px] max-h-32 overflow-auto whitespace-pre-wrap break-all text-gray-600 dark:text-gray-300\">{uploadInsertSQL}</pre>\n          </div>\n        {/if}\n      </div>\n    {/if}\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <button\n        type=\"button\"\n        class=\"ds-btn-outline\"\n        onclick={() => (uploadSheetOpen = false)}>Cancel</button\n      >\n      <button\n        type=\"submit\"\n        class=\"ds-btn-primary\"\n        disabled={uploadSubmitting || uploadDiscovering}\n      >\n        <Upload size={14} />\n        {uploadSubmitting ? 
\"Uploading...\" : \"Upload Data\"}\n      </button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={deleteTableSheetOpen}\n  title=\"Delete Table\"\n  size=\"md\"\n  onclose={() => (deleteTableSheetOpen = false)}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault();\n      submitDeleteTable();\n    }}\n  >\n    <div\n      class=\"rounded-lg border border-red-500/40 bg-red-500/10 p-3 text-sm text-red-200 flex items-start gap-2\"\n    >\n      <AlertTriangle size={16} class=\"mt-0.5 text-red-400\" />\n      <div>\n        This will permanently delete <strong\n          >{deleteTableForm.database}.{deleteTableForm.name}</strong\n        >.\n      </div>\n    </div>\n\n    <div class=\"grid gap-4 md:grid-cols-2\">\n      <div>\n        <div class=\"ds-form-label\">Cluster (optional)</div>\n        <Combobox\n          options={clusterOptions}\n          value={deleteTableForm.onCluster}\n          onChange={(value) =>\n            (deleteTableForm = { ...deleteTableForm, onCluster: value })}\n          placeholder={clustersLoading ? 
\"Loading clusters...\" : \"No cluster\"}\n          disabled={clustersLoading}\n        />\n      </div>\n      <div class=\"flex items-end pb-2\">\n        <label class=\"ds-checkbox-label\">\n          <input\n            type=\"checkbox\"\n            class=\"ds-checkbox\"\n            bind:checked={deleteTableForm.sync}\n          />\n          Use SYNC drop\n        </label>\n      </div>\n    </div>\n\n    <div>\n      <div class=\"ds-form-label\">Type full name to confirm</div>\n      <input\n        class=\"ds-input\"\n        bind:value={deleteTableForm.typedName}\n        placeholder={`${deleteTableForm.database}.${deleteTableForm.name}`}\n      />\n    </div>\n\n    <div class=\"flex items-center justify-end gap-2 pt-1\">\n      <button\n        type=\"button\"\n        class=\"ds-btn-outline\"\n        onclick={() => (deleteTableSheetOpen = false)}>Cancel</button\n      >\n      <button\n        type=\"submit\"\n        class=\"inline-flex items-center justify-center gap-1.5 rounded px-3 py-1.5 text-[13px] font-medium text-white bg-red-600 border border-red-500 transition-colors hover:bg-red-700 disabled:opacity-60 disabled:cursor-not-allowed\"\n        disabled={deleteTableSubmitting ||\n          deleteTableForm.typedName.trim() !==\n            `${deleteTableForm.database}.${deleteTableForm.name}`}\n      >\n        <Trash2 size={14} />\n        {deleteTableSubmitting ? \"Deleting...\" : \"Delete Table\"}\n      </button>\n    </div>\n  </form>\n</Sheet>\n"
  },
  {
    "path": "ui/src/lib/components/governance/LineageGraph.svelte",
    "content": "<script lang=\"ts\">\n  import {\n    SvelteFlow,\n    Controls,\n    Background,\n    MiniMap,\n    type Edge,\n    type NodeTypes,\n  } from '@xyflow/svelte'\n  import '@xyflow/svelte/dist/style.css'\n  import { getTheme } from '../../stores/theme.svelte'\n  import type { LineageGraph as LineageGraphType, LineageEdge } from '../../types/governance'\n  import { layoutLineageGraph } from '../../utils/lineage-layout'\n  import LineageTableNode from './LineageTableNode.svelte'\n\n  interface Props {\n    graph: LineageGraphType\n    searchFilter?: string\n    onedgeclick?: (edge: LineageEdge) => void\n    onnodeclick?: (nodeId: string) => void\n  }\n\n  let { graph, searchFilter = '', onedgeclick, onnodeclick }: Props = $props()\n\n  const nodeTypes: NodeTypes = {\n    lineageTable: LineageTableNode as unknown as NodeTypes['lineageTable'],\n  }\n\n  const filteredGraph = $derived.by(() => {\n    if (!searchFilter.trim()) return graph\n    const q = searchFilter.toLowerCase()\n    const matchingNodes = graph.nodes.filter(\n      (n) =>\n        n.table.toLowerCase().includes(q) ||\n        n.database.toLowerCase().includes(q) ||\n        n.id.toLowerCase().includes(q),\n    )\n    const matchingIds = new Set(matchingNodes.map((n) => n.id))\n    // Also include directly connected nodes\n    for (const edge of graph.edges) {\n      const srcKey = `${edge.source_database}.${edge.source_table}`\n      const tgtKey = `${edge.target_database}.${edge.target_table}`\n      if (matchingIds.has(srcKey)) matchingIds.add(tgtKey)\n      if (matchingIds.has(tgtKey)) matchingIds.add(srcKey)\n    }\n    const nodes = graph.nodes.filter((n) => matchingIds.has(n.id))\n    const nodeIdSet = new Set(nodes.map((n) => n.id))\n    const edges = graph.edges.filter((e) => {\n      const srcKey = `${e.source_database}.${e.source_table}`\n      const tgtKey = `${e.target_database}.${e.target_table}`\n      return nodeIdSet.has(srcKey) && nodeIdSet.has(tgtKey)\n    })\n    return 
{ nodes, edges }\n  })\n\n  const flowNodes = $derived(layoutLineageGraph(filteredGraph.nodes, filteredGraph.edges))\n\n  const flowEdges = $derived<Edge[]>(\n    filteredGraph.edges.map((e) => {\n      const isDashed = e.edge_type === 'create_as_select' || e.edge_type === 'materialized_to'\n      return {\n        id: e.id,\n        source: `${e.source_database}.${e.source_table}`,\n        target: `${e.target_database}.${e.target_table}`,\n        animated: true,\n        style: isDashed\n          ? 'stroke: #3b82f6; stroke-width: 2px; stroke-dasharray: 5 3;'\n          : 'stroke: #f97316; stroke-width: 2px;',\n      }\n    }),\n  )\n\n  // Map edge IDs to LineageEdge for click handler\n  const edgeMap = $derived(new Map(filteredGraph.edges.map((e) => [e.id, e])))\n\n  function handleEdgeClick(event: { edge: Edge }) {\n    const lineageEdge = edgeMap.get(event.edge.id)\n    if (lineageEdge && onedgeclick) onedgeclick(lineageEdge)\n  }\n\n  function handleNodeClick(event: { node: { id: string } }) {\n    if (onnodeclick) onnodeclick(event.node.id)\n  }\n</script>\n\n<SvelteFlow\n  nodes={flowNodes}\n  edges={flowEdges}\n  {nodeTypes}\n  fitView\n  colorMode={getTheme()}\n  proOptions={{ hideAttribution: true }}\n  onedgeclick={handleEdgeClick}\n  onnodeclick={handleNodeClick}\n  defaultEdgeOptions={{ animated: true }}\n>\n  <Controls />\n  <Background gap={16} />\n  <MiniMap />\n</SvelteFlow>\n"
  },
  {
    "path": "ui/src/lib/components/governance/LineageTableNode.svelte",
    "content": "<script lang=\"ts\">\n  import { Handle, Position } from '@xyflow/svelte'\n  import { Database, ChevronDown, ChevronRight } from 'lucide-svelte'\n  import type { GovColumn } from '../../types/governance'\n\n  interface Props {\n    data: {\n      database: string\n      table: string\n      nodeType: string\n      columns: GovColumn[]\n      linkedColumns: string[]\n    }\n  }\n\n  let { data }: Props = $props()\n  let expanded = $state(false)\n\n  function nodeColor(type: string): { border: string; bg: string } {\n    switch (type) {\n      case 'current':\n      case 'materialized_view':\n        return { border: 'border-orange-400 dark:border-orange-500', bg: 'bg-orange-50 dark:bg-orange-900/20' }\n      case 'source':\n        return { border: 'border-blue-400 dark:border-blue-500', bg: 'bg-blue-50 dark:bg-blue-900/20' }\n      case 'view':\n        return { border: 'border-purple-400 dark:border-purple-500', bg: 'bg-purple-50 dark:bg-purple-900/20' }\n      default: // target\n        return { border: 'border-green-400 dark:border-green-500', bg: 'bg-green-50 dark:bg-green-900/20' }\n    }\n  }\n\n  const colors = $derived(nodeColor(data.nodeType))\n  const borderColor = $derived(colors.border)\n  const bgColor = $derived(colors.bg)\n\n  const linkedSet = $derived(new Set(data.linkedColumns))\n</script>\n\n<div class=\"rounded-lg border-2 {borderColor} {bgColor} shadow-sm w-[220px]\">\n  <Handle type=\"target\" position={Position.Left} class=\"!bg-blue-500 !w-3 !h-3 !border-2 !border-white dark:!border-gray-900\" />\n\n  <div class=\"px-3 py-2\">\n    <div class=\"flex items-center gap-2\">\n      <Database size={14} class=\"text-gray-500 shrink-0\" />\n      <div class=\"min-w-0 flex-1\">\n        <div class=\"text-[10px] text-gray-500 dark:text-gray-400 truncate\">{data.database}</div>\n        <div class=\"text-xs font-semibold text-gray-800 dark:text-gray-200 truncate\">{data.table}</div>\n        {#if data.nodeType === 
'materialized_view'}\n          <span class=\"text-[9px] px-1 py-0.5 rounded bg-orange-200/60 dark:bg-orange-800/40 text-orange-700 dark:text-orange-300\">MV</span>\n        {:else if data.nodeType === 'view'}\n          <span class=\"text-[9px] px-1 py-0.5 rounded bg-purple-200/60 dark:bg-purple-800/40 text-purple-700 dark:text-purple-300\">View</span>\n        {/if}\n      </div>\n    </div>\n  </div>\n\n  {#if data.columns && data.columns.length > 0}\n    <button\n      class=\"w-full flex items-center gap-1.5 px-3 py-1 text-[10px] text-gray-500 hover:bg-gray-100 dark:hover:bg-gray-800/40 border-t border-gray-200 dark:border-gray-700/50 cursor-pointer\"\n      onclick={() => (expanded = !expanded)}\n    >\n      {#if expanded}\n        <ChevronDown size={10} />\n      {:else}\n        <ChevronRight size={10} />\n      {/if}\n      {data.columns.length} columns\n    </button>\n\n    {#if expanded}\n      <div class=\"max-h-40 overflow-auto px-2 pb-2\">\n        {#each data.columns as col}\n          <div class=\"flex items-center gap-1.5 py-0.5 px-1 rounded {linkedSet.has(col.column_name) ? 'bg-orange-100/60 dark:bg-orange-900/30' : ''}\">\n            <span\n              class=\"text-[10px] font-mono truncate flex-1 {linkedSet.has(col.column_name)\n                ? 'text-orange-700 dark:text-orange-300 font-semibold'\n                : 'text-gray-600 dark:text-gray-400'}\"\n            >\n              {col.column_name}\n            </span>\n            <span class=\"text-[9px] text-gray-400 dark:text-gray-500 shrink-0\">{col.column_type}</span>\n          </div>\n        {/each}\n      </div>\n    {/if}\n  {/if}\n\n  <Handle type=\"source\" position={Position.Right} class=\"!bg-orange-500 !w-3 !h-3 !border-2 !border-white dark:!border-gray-900\" />\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/layout/CommandPalette.svelte",
    "content": "<script lang=\"ts\">\n  import { tick } from 'svelte'\n  import { Search, Plus, Table2, Sparkles, LayoutDashboard, Bookmark, Clock, Brain, Shield, Settings, Moon, Sun, LogOut, SquareTerminal, Home } from 'lucide-svelte'\n  import { closeCommandPalette, isCommandPaletteOpen } from '../../stores/command-palette.svelte'\n  import { openQueryTab, openSingletonTab, openTableTab, openDashboardTab, getTabs, openHomeTab } from '../../stores/tabs.svelte'\n  import { getDatabases, loadDatabases } from '../../stores/schema.svelte'\n  import { getSession, logout } from '../../stores/session.svelte'\n  import { getTheme, toggleTheme } from '../../stores/theme.svelte'\n\n  interface CommandItem {\n    id: string\n    title: string\n    subtitle: string\n    keywords: string\n    icon: typeof Search\n    run: () => void\n  }\n\n  let inputEl: HTMLInputElement | undefined = $state()\n  let query = $state('')\n  let selectedIdx = $state(0)\n\n  const open = $derived(isCommandPaletteOpen())\n  const tabs = $derived(getTabs())\n  const databases = $derived(getDatabases())\n  const session = $derived(getSession())\n\n  function sequentialMatchScore(text: string, term: string): number {\n    if (!term) return 1\n    let ti = 0\n    let score = 0\n    const lowerText = text.toLowerCase()\n    const lowerTerm = term.toLowerCase()\n\n    for (let i = 0; i < lowerText.length && ti < lowerTerm.length; i++) {\n      if (lowerText[i] === lowerTerm[ti]) {\n        score += i > 0 && lowerText[i - 1] === ' ' ? 
5 : 2\n        ti++\n      }\n    }\n\n    if (ti !== lowerTerm.length) return -1\n    if (lowerText.startsWith(lowerTerm)) score += 25\n    if (lowerText.includes(` ${lowerTerm}`)) score += 10\n    return score\n  }\n\n  const actions = $derived.by(() => {\n    const items: CommandItem[] = [\n      {\n        id: 'action-home',\n        title: 'Home',\n        subtitle: 'Navigate',\n        keywords: 'home workspace start',\n        icon: Home,\n        run: () => openHomeTab(),\n      },\n      {\n        id: 'action-new-query',\n        title: 'New Query',\n        subtitle: 'Workspace action',\n        keywords: 'new query sql run editor',\n        icon: Plus,\n        run: () => openQueryTab(),\n      },\n      {\n        id: 'action-saved',\n        title: 'Saved Queries',\n        subtitle: 'Navigate',\n        keywords: 'saved queries history bookmarks',\n        icon: Bookmark,\n        run: () => openSingletonTab('saved-queries', 'Saved Queries'),\n      },\n      {\n        id: 'action-dashboards',\n        title: 'Dashboards',\n        subtitle: 'Navigate',\n        keywords: 'charts panels dashboards metrics',\n        icon: LayoutDashboard,\n        run: () => openSingletonTab('dashboards', 'Dashboards'),\n      },\n      {\n        id: 'action-schedules',\n        title: 'Schedules',\n        subtitle: 'Navigate',\n        keywords: 'cron scheduled jobs runs',\n        icon: Clock,\n        run: () => openSingletonTab('schedules', 'Schedules'),\n      },\n      {\n        id: 'action-brain',\n        title: 'Brain AI',\n        subtitle: 'Navigate',\n        keywords: 'brain ai assistant sql helper',\n        icon: Brain,\n        run: () => openSingletonTab('brain', 'Brain'),\n      },\n      {\n        id: 'action-admin',\n        title: 'Admin',\n        subtitle: 'Navigate',\n        keywords: 'admin users audit logs query log',\n        icon: Shield,\n        run: () => openSingletonTab('admin', 'Admin'),\n      },\n      {\n        id: 
'action-settings',\n        title: 'License',\n        subtitle: 'Navigate',\n        keywords: 'license settings config entitlements',\n        icon: Settings,\n        run: () => openSingletonTab('settings', 'License'),\n      },\n      {\n        id: 'action-theme',\n        title: getTheme() === 'dark' ? 'Switch to Light Theme' : 'Switch to Dark Theme',\n        subtitle: 'Appearance',\n        keywords: 'theme dark light appearance',\n        icon: getTheme() === 'dark' ? Sun : Moon,\n        run: () => toggleTheme(),\n      },\n    ]\n\n    if (session) {\n      items.push({\n        id: 'action-logout',\n        title: 'Logout',\n        subtitle: 'Session',\n        keywords: 'logout sign out session',\n        icon: LogOut,\n        run: () => logout(),\n      })\n    }\n\n    for (const tab of tabs.filter((entry) => entry.type !== 'home').slice(-8).reverse()) {\n      const icon = tab.type === 'query'\n        ? SquareTerminal\n        : (tab.type === 'table'\n            ? Table2\n            : (tab.type === 'dashboard' ? 
LayoutDashboard : Sparkles))\n      items.push({\n        id: `tab-${tab.id}`,\n        title: `Open ${tab.name}`,\n        subtitle: 'Recent tab',\n        keywords: `${tab.name} tab ${tab.type}`,\n        icon,\n        run: () => {\n          if (tab.type === 'table') openTableTab(tab.database, tab.table)\n          if (tab.type === 'saved-queries') openSingletonTab('saved-queries', 'Saved Queries')\n          if (tab.type === 'dashboard') openDashboardTab(tab.dashboardId, tab.name)\n          if (tab.type === 'dashboards') openSingletonTab('dashboards', 'Dashboards')\n          if (tab.type === 'schedules') openSingletonTab('schedules', 'Schedules')\n          if (tab.type === 'brain') openSingletonTab('brain', 'Brain')\n          if (tab.type === 'admin') openSingletonTab('admin', 'Admin')\n          if (tab.type === 'settings') openSingletonTab('settings', 'License')\n          if (tab.type === 'governance') openSingletonTab('governance', 'Governance')\n          if (tab.type === 'query') openQueryTab(tab.sql)\n        },\n      })\n    }\n\n    for (const db of databases.slice(0, 12)) {\n      if (!db.tables) continue\n      for (const t of db.tables.slice(0, 12)) {\n        items.push({\n          id: `table-${db.name}.${t.name}`,\n          title: `${db.name}.${t.name}`,\n          subtitle: 'Open table',\n          keywords: `${db.name} ${t.name} table schema`,\n          icon: Table2,\n          run: () => openTableTab(db.name, t.name),\n        })\n      }\n    }\n\n    return items\n  })\n\n  const filtered = $derived.by(() => {\n    const term = query.trim().toLowerCase()\n    const ranked = actions\n      .map((item) => {\n        const text = `${item.title} ${item.subtitle} ${item.keywords}`\n        return { item, score: sequentialMatchScore(text, term) }\n      })\n      .filter((x) => x.score >= 0)\n      .sort((a, b) => b.score - a.score)\n      .slice(0, 28)\n      .map((x) => x.item)\n\n    return ranked\n  })\n\n  async function 
runCommand(item: CommandItem) {\n    item.run()\n    closeCommandPalette()\n    query = ''\n    selectedIdx = 0\n  }\n\n  function handleKeydown(e: KeyboardEvent) {\n    if (!open) return\n\n    if (e.key === 'Escape') {\n      e.preventDefault()\n      closeCommandPalette()\n      return\n    }\n\n    if (e.key === 'ArrowDown') {\n      e.preventDefault()\n      selectedIdx = Math.min(filtered.length - 1, selectedIdx + 1)\n      return\n    }\n\n    if (e.key === 'ArrowUp') {\n      e.preventDefault()\n      selectedIdx = Math.max(0, selectedIdx - 1)\n      return\n    }\n\n    if (e.key === 'Enter' && filtered[selectedIdx]) {\n      e.preventDefault()\n      runCommand(filtered[selectedIdx])\n    }\n  }\n\n  $effect(() => {\n    if (!open) return\n\n    if (databases.length === 0) {\n      loadDatabases()\n    }\n\n    query = ''\n    selectedIdx = 0\n\n    tick().then(() => inputEl?.focus())\n  })\n</script>\n\n<svelte:window onkeydown={handleKeydown} />\n\n{#if open}\n  <button\n    type=\"button\"\n    class=\"fixed inset-0 z-[80] bg-gray-950/45 backdrop-blur-sm\"\n    aria-label=\"Close command palette\"\n    onclick={() => closeCommandPalette()}\n  ></button>\n  <div class=\"fixed inset-0 z-[81] flex items-start justify-center pt-[12vh] px-4\">\n    <div class=\"surface-card w-full max-w-2xl rounded-2xl overflow-hidden\">\n      <div class=\"flex items-center gap-2 px-3 py-2.5 border-b border-gray-200/80 dark:border-gray-800/80\">\n        <Search size={14} class=\"text-gray-500\" />\n        <input\n          bind:this={inputEl}\n          bind:value={query}\n          type=\"text\"\n          placeholder=\"Search actions, tables, tabs...\"\n          class=\"w-full bg-transparent text-sm text-gray-800 dark:text-gray-200 placeholder:text-gray-400 outline-none\"\n        />\n        <span class=\"text-[10px] text-gray-400 px-2 py-1 rounded border border-gray-300/70 dark:border-gray-700/80\">ESC</span>\n      </div>\n\n      <div class=\"max-h-[55vh] 
overflow-y-auto p-1.5\">\n        {#if filtered.length === 0}\n          <div class=\"px-3 py-8 text-center text-sm text-gray-500\">No command found</div>\n        {:else}\n          {#each filtered as item, idx (item.id)}\n            <button\n              class=\"w-full flex items-center gap-3 px-3 py-2 rounded-lg text-left transition-colors {idx === selectedIdx ? 'bg-ch-blue/10 text-ch-blue' : 'hover:bg-gray-200/55 dark:hover:bg-gray-800/60 text-gray-700 dark:text-gray-300'}\"\n              onclick={() => runCommand(item)}\n              onmouseenter={() => selectedIdx = idx}\n            >\n              <item.icon size={15} class={idx === selectedIdx ? 'text-ch-blue' : 'text-gray-500'} />\n              <span class=\"flex-1 min-w-0\">\n                <span class=\"block text-sm font-medium truncate\">{item.title}</span>\n                <span class=\"block text-[11px] text-gray-500 dark:text-gray-400 truncate\">{item.subtitle}</span>\n              </span>\n              {#if idx === selectedIdx}\n                <span class=\"text-[10px] text-gray-500 px-2 py-1 rounded border border-gray-300/70 dark:border-gray-700/80\">ENTER</span>\n              {/if}\n            </button>\n          {/each}\n        {/if}\n      </div>\n\n      <div class=\"px-3 py-2 border-t border-gray-200/80 dark:border-gray-800/80 text-[11px] text-gray-500 dark:text-gray-400\">\n        Use <span class=\"font-medium\">↑ ↓</span> to navigate, <span class=\"font-medium\">Enter</span> to run.\n      </div>\n    </div>\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/layout/Shell.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import Sidebar from './Sidebar.svelte'\n  import TabGroup from './TabGroup.svelte'\n  import CommandPalette from './CommandPalette.svelte'\n  import { getGroups, isSplit, setFocusedGroup, splitTabToSide, openQueryTab } from '../../stores/tabs.svelte'\n  import { openCommandPalette } from '../../stores/command-palette.svelte'\n\n  const groups = $derived(getGroups())\n  const split = $derived(isSplit())\n\n  // Split pane resize state\n  let splitPercent = $state(50)\n  let resizing = $state(false)\n  let containerEl: HTMLDivElement | undefined = $state()\n  const EDGE_SPLIT_WIDTH = 72\n  let edgeSplitVisible = $state(false)\n  let edgeSplitSide = $state<'left' | 'right' | null>(null)\n\n  // ── Resize handlers ──────────────────────────────────────────\n\n  function onResizeStart(e: MouseEvent) {\n    e.preventDefault()\n    resizing = true\n    document.addEventListener('mousemove', onResizeMove)\n    document.addEventListener('mouseup', onResizeEnd)\n  }\n\n  function onResizeMove(e: MouseEvent) {\n    if (!containerEl) return\n    const rect = containerEl.getBoundingClientRect()\n    const x = e.clientX - rect.left\n    splitPercent = Math.max(20, Math.min(80, (x / rect.width) * 100))\n  }\n\n  function onResizeEnd() {\n    resizing = false\n    document.removeEventListener('mousemove', onResizeMove)\n    document.removeEventListener('mouseup', onResizeEnd)\n  }\n\n  function handleGlobalShortcuts(e: KeyboardEvent) {\n    const mod = e.metaKey || e.ctrlKey\n    const target = e.target as HTMLElement | null\n    const isTypingTarget = !!target && (\n      target.tagName === 'INPUT' ||\n      target.tagName === 'TEXTAREA' ||\n      target.tagName === 'SELECT' ||\n      target.isContentEditable\n    )\n\n    if (mod && e.key.toLowerCase() === 'k') {\n      e.preventDefault()\n      openCommandPalette()\n      return\n    }\n\n    if (!isTypingTarget && (e.altKey && e.key.toLowerCase() === 
'k' || e.key === '/')) {\n      e.preventDefault()\n      openCommandPalette()\n      return\n    }\n\n    if (isTypingTarget) return\n\n    if ((mod && e.shiftKey && e.key.toLowerCase() === 'n') || (e.altKey && e.key.toLowerCase() === 'n')) {\n      e.preventDefault()\n      openQueryTab()\n    }\n  }\n\n  onMount(() => {\n    window.addEventListener('keydown', handleGlobalShortcuts, true)\n    return () => window.removeEventListener('keydown', handleGlobalShortcuts, true)\n  })\n\n  function resetEdgeSplitState() {\n    edgeSplitVisible = false\n    edgeSplitSide = null\n  }\n\n  function isTabDrag(e: DragEvent): boolean {\n    const types = e.dataTransfer?.types\n    if (!types) return false\n    return types.includes('text/plain')\n  }\n\n  function handleContentDragOver(e: DragEvent) {\n    if (!isTabDrag(e)) return\n    const dt = e.dataTransfer\n    if (!dt) return\n    if (!containerEl) return\n    e.preventDefault()\n    dt.dropEffect = 'move'\n\n    const rect = containerEl.getBoundingClientRect()\n    const x = e.clientX - rect.left\n    edgeSplitVisible = true\n\n    if (x <= EDGE_SPLIT_WIDTH) {\n      edgeSplitSide = 'left'\n    } else if (x >= rect.width - EDGE_SPLIT_WIDTH) {\n      edgeSplitSide = 'right'\n    } else {\n      edgeSplitSide = null\n    }\n  }\n\n  function handleEdgeDragOver(side: 'left' | 'right', e: DragEvent) {\n    if (!isTabDrag(e)) return\n    const dt = e.dataTransfer\n    if (!dt) return\n    e.preventDefault()\n    dt.dropEffect = 'move'\n    edgeSplitVisible = true\n    edgeSplitSide = side\n  }\n\n  function handleEdgeDrop(side: 'left' | 'right', e: DragEvent) {\n    const tabId = e.dataTransfer?.getData('text/plain')\n    if (!tabId) {\n      resetEdgeSplitState()\n      return\n    }\n    e.preventDefault()\n    splitTabToSide(tabId, side)\n    resetEdgeSplitState()\n  }\n\n  function handleContentDrop(e: DragEvent) {\n    const tabId = e.dataTransfer?.getData('text/plain')\n    if (!tabId || !edgeSplitSide) {\n      
resetEdgeSplitState()\n      return\n    }\n    e.preventDefault()\n    splitTabToSide(tabId, edgeSplitSide)\n    resetEdgeSplitState()\n  }\n\n  function handleContentDragLeave() {\n    edgeSplitSide = null\n  }\n</script>\n\n<svelte:window ondragend={resetEdgeSplitState} ondrop={resetEdgeSplitState} />\n\n<div class=\"flex h-full\">\n  <Sidebar />\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div\n    class=\"relative flex flex-1 min-w-0\"\n    bind:this={containerEl}\n    ondragover={handleContentDragOver}\n    ondrop={handleContentDrop}\n    ondragleave={handleContentDragLeave}\n  >\n    {#each groups as group, i (group.id)}\n      <!-- svelte-ignore a11y_no_static_element_interactions -->\n      <div\n        class=\"flex flex-col min-w-0 overflow-hidden\"\n        style={split ? `width: ${i === 0 ? splitPercent : 100 - splitPercent}%` : 'flex: 1'}\n        onclick={() => setFocusedGroup(group.id)}\n        onkeydown={(e) => {\n          // Only treat Space/Enter as pane activation when the pane itself is focused.\n          if (e.target !== e.currentTarget) return\n          if (e.key === 'Enter' || e.key === ' ') {\n            e.preventDefault()\n            setFocusedGroup(group.id)\n          }\n        }}\n        role=\"button\"\n        tabindex=\"0\"\n      >\n        <TabGroup groupId={group.id} />\n      </div>\n\n      {#if split && i === 0}\n        <!-- Resize handle between split panes -->\n        <!-- svelte-ignore a11y_no_static_element_interactions -->\n        <div\n          class=\"w-1 shrink-0 cursor-col-resize group/split flex items-center justify-center hover:bg-ch-blue/20 transition-colors {resizing ? 'bg-ch-blue/30' : ''}\"\n          onmousedown={onResizeStart}\n          ondblclick={() => splitPercent = 50}\n        >\n          <div class=\"h-8 w-0.5 rounded-full {resizing ? 
'bg-ch-blue' : 'bg-gray-300 dark:bg-gray-700 group-hover/split:bg-ch-blue/60'} transition-colors\"></div>\n        </div>\n      {/if}\n    {/each}\n\n    {#if edgeSplitVisible}\n      <div class=\"pointer-events-none absolute inset-0 z-30\">\n        <!-- svelte-ignore a11y_no_static_element_interactions -->\n        <div\n          class=\"pointer-events-auto absolute left-0 top-0 bottom-0 transition-colors\"\n          style={`width:${EDGE_SPLIT_WIDTH}px`}\n          ondragover={(e) => handleEdgeDragOver('left', e)}\n          ondrop={(e) => handleEdgeDrop('left', e)}\n        >\n          <div class=\"absolute inset-0 transition-colors {edgeSplitSide === 'left' ? 'bg-ch-blue/16 border-r border-ch-blue/50' : 'bg-transparent'}\"></div>\n        </div>\n        <!-- svelte-ignore a11y_no_static_element_interactions -->\n        <div\n          class=\"pointer-events-auto absolute right-0 top-0 bottom-0 transition-colors\"\n          style={`width:${EDGE_SPLIT_WIDTH}px`}\n          ondragover={(e) => handleEdgeDragOver('right', e)}\n          ondrop={(e) => handleEdgeDrop('right', e)}\n        >\n          <div class=\"absolute inset-0 transition-colors {edgeSplitSide === 'right' ? 'bg-ch-blue/16 border-l border-ch-blue/50' : 'bg-transparent'}\"></div>\n        </div>\n      </div>\n    {/if}\n\n  </div>\n</div>\n\n<CommandPalette />\n\n{#if resizing}\n  <div class=\"fixed inset-0 z-50 cursor-col-resize\"></div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/layout/Sidebar.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import { getActiveTab, openQueryTab, openSingletonTab, openTableTab } from '../../stores/tabs.svelte'\n  import type { SingletonTab } from '../../stores/tabs.svelte'\n  import { getSession, logout } from '../../stores/session.svelte'\n  import { toggleTheme, getTheme } from '../../stores/theme.svelte'\n  import { openCommandPalette } from '../../stores/command-palette.svelte'\n  import { isProActive, loadLicense } from '../../stores/license.svelte'\n  import DatabaseTree from '../explorer/DatabaseTree.svelte'\n  import {\n    Plus,\n    Bookmark,\n    LayoutDashboard,\n    Clock,\n    Brain,\n    Shield,\n    Scale,\n    Workflow,\n    Boxes,\n    Settings,\n    Database,\n    Sun,\n    Moon,\n    LogOut,\n    Search,\n    ChevronDown,\n    ExternalLink,\n    ChevronLeft,\n    ChevronRight,\n  } from 'lucide-svelte'\n\n  const session = $derived(getSession())\n\n  interface NavItemInternal {\n    type: SingletonTab['type']\n    label: string\n    icon: typeof Bookmark\n    pro?: boolean\n  }\n  interface NavItemExternal {\n    type: 'external'\n    label: string\n    icon: typeof Bookmark\n    href: string\n  }\n  type NavItem = NavItemInternal | NavItemExternal\n\n  const navItems: NavItem[] = [\n    { type: 'saved-queries', label: 'Saved Queries', icon: Bookmark },\n    { type: 'dashboards', label: 'Dashboards', icon: LayoutDashboard },\n    { type: 'pipelines', label: 'Pipelines', icon: Workflow },\n    { type: 'models', label: 'Models', icon: Boxes },\n    { type: 'schedules', label: 'Schedules', icon: Clock, pro: true },\n    { type: 'brain', label: 'Brain', icon: Brain },\n    { type: 'governance', label: 'Governance', icon: Scale, pro: true },\n    { type: 'admin', label: 'Admin', icon: Shield },\n    { type: 'settings', label: 'License', icon: Settings },\n    { type: 'external', label: 'CH-UI Docs', icon: ExternalLink, href: 'https://ch-ui.com/docs' },\n  ]\n\n  const MIN_WIDTH = 
200\n  const MAX_WIDTH = 500\n  const COLLAPSE_THRESHOLD = 120\n  const COLLAPSED_WIDTH = 40\n  const DEFAULT_WIDTH = 244\n\n  const savedCollapsed = localStorage.getItem('ch-ui-sidebar-collapsed') === 'true'\n  const savedWidth = parseInt(localStorage.getItem('ch-ui-sidebar-width') ?? String(DEFAULT_WIDTH), 10)\n  const savedMenuCollapsed = localStorage.getItem('ch-ui-sidebar-menu-collapsed') === 'true'\n\n  let collapsed = $state(savedCollapsed)\n  let sidebarWidth = $state(isNaN(savedWidth) ? DEFAULT_WIDTH : savedWidth)\n  let menuCollapsed = $state(savedMenuCollapsed)\n  let dragging = $state(false)\n  const licensedPro = $derived(isProActive())\n  const activeTab = $derived(getActiveTab())\n\n  onMount(() => {\n    loadLicense()\n\n    function handleKeydown(e: KeyboardEvent) {\n      if ((e.metaKey || e.ctrlKey) && e.key === 'b') {\n        e.preventDefault()\n        toggleCollapse()\n      }\n    }\n\n    document.addEventListener('keydown', handleKeydown)\n\n    return () => {\n      document.removeEventListener('mousemove', onDragMove)\n      document.removeEventListener('mouseup', onDragEnd)\n      document.removeEventListener('keydown', handleKeydown)\n    }\n  })\n\n  function toggleCollapse() {\n    collapsed = !collapsed\n    localStorage.setItem('ch-ui-sidebar-collapsed', String(collapsed))\n  }\n\n  function toggleMenuCollapsed() {\n    menuCollapsed = !menuCollapsed\n    localStorage.setItem('ch-ui-sidebar-menu-collapsed', String(menuCollapsed))\n  }\n\n  function handleSelectTable(database: string, table: string) {\n    openTableTab(database, table)\n  }\n\n  function isNavItemActive(type: SingletonTab['type']): boolean {\n    if (!activeTab) return false\n    if (type === 'dashboards') return activeTab.type === 'dashboards' || activeTab.type === 'dashboard'\n    if (type === 'models') return activeTab.type === 'models'\n    return activeTab.type === type\n  }\n\n  // ── Drag handle logic ──\n  function onDragStart(e: MouseEvent) {\n    
e.preventDefault()\n    dragging = true\n    document.addEventListener('mousemove', onDragMove)\n    document.addEventListener('mouseup', onDragEnd)\n  }\n\n  function onDragMove(e: MouseEvent) {\n    const newWidth = e.clientX\n    if (newWidth < COLLAPSE_THRESHOLD) {\n      collapsed = true\n      localStorage.setItem('ch-ui-sidebar-collapsed', 'true')\n    } else {\n      collapsed = false\n      localStorage.setItem('ch-ui-sidebar-collapsed', 'false')\n      sidebarWidth = Math.max(MIN_WIDTH, Math.min(MAX_WIDTH, newWidth))\n    }\n  }\n\n  function onDragEnd() {\n    dragging = false\n    document.removeEventListener('mousemove', onDragMove)\n    document.removeEventListener('mouseup', onDragEnd)\n    localStorage.setItem('ch-ui-sidebar-width', String(sidebarWidth))\n  }\n</script>\n\n<div class=\"flex shrink-0 h-full\">\n  <nav\n    class=\"flex flex-col border-r border-gray-200/80 dark:border-gray-800/80 bg-white dark:bg-gray-950 overflow-hidden transition-[width] {dragging ? 'duration-0' : 'duration-200'}\"\n    style=\"width: {collapsed ? 
COLLAPSED_WIDTH : sidebarWidth}px\"\n  >\n    {#if collapsed}\n      <!-- Collapsed: icon-only buttons -->\n      <div class=\"flex flex-col items-center py-2 gap-1 mt-2\">\n        <button\n          class=\"p-2 rounded text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40\"\n          onclick={() => openQueryTab()}\n          title=\"New Query\"\n        >\n          <Plus size={17} />\n        </button>\n      </div>\n      <div class=\"flex-1\"></div>\n      <div class=\"flex flex-col items-center py-2 gap-1 border-t border-gray-200 dark:border-gray-800\">\n        {#each navItems as item}\n          {#if item.type === 'external'}\n            <a\n              class=\"p-2 rounded transition-colors text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40\"\n              href={item.href}\n              target=\"_blank\"\n              rel=\"noopener noreferrer\"\n              title={item.label}\n            >\n              <item.icon size={17} />\n            </a>\n          {:else}\n            <button\n              class=\"p-2 rounded transition-colors\n                {isNavItemActive(item.type)\n                  ? 'text-ch-orange bg-orange-100/70 dark:bg-orange-500/18'\n                  : 'text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40'}\"\n              onclick={() => openSingletonTab(item.type, item.label)}\n              title={item.label}\n            >\n              <item.icon size={17} />\n            </button>\n          {/if}\n        {/each}\n      </div>\n      <!-- Connection status + actions (collapsed) -->\n      <div class=\"flex flex-col items-center py-2 gap-1 border-t border-gray-200 dark:border-gray-800\">\n        {#if session}\n          <span\n            class=\"w-2 h-2 rounded-full my-1 {session.connectionOnline ? 
'bg-green-500' : 'bg-red-500'}\"\n            title={session.connectionOnline ? 'Connected' : 'Disconnected'}\n          ></span>\n        {/if}\n        <button\n          class=\"p-2 rounded text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40\"\n          onclick={toggleTheme}\n          title=\"Toggle theme\"\n        >\n          {#if getTheme() === 'dark'}\n            <Sun size={17} />\n          {:else}\n            <Moon size={17} />\n          {/if}\n        </button>\n        {#if session}\n          <button\n            class=\"p-2 rounded text-gray-500 dark:text-gray-400 hover:text-red-400 hover:bg-gray-200/40 dark:hover:bg-gray-800/40\"\n            onclick={logout}\n            title=\"Logout\"\n          >\n            <LogOut size={17} />\n          </button>\n        {/if}\n        <!-- Expand button at bottom (collapsed state) -->\n        <button\n          class=\"p-2 rounded text-gray-500 dark:text-gray-400 hover:text-ch-blue dark:hover:text-ch-blue hover:bg-gray-200/40 dark:hover:bg-gray-800/40 transition-colors\"\n          onclick={toggleCollapse}\n          title=\"Expand sidebar (⌘B)\"\n        >\n          <ChevronRight size={17} />\n        </button>\n      </div>\n    {:else}\n      <!-- Expanded: full sidebar -->\n      <div class=\"px-2.5 py-1.5 border-b border-gray-200 dark:border-gray-800\">\n        <button\n          class=\"w-full flex items-center gap-2 px-3 py-2 text-[12px] font-medium rounded-md border border-gray-300/70 dark:border-gray-700/70 text-gray-600 dark:text-gray-300 bg-gray-100/60 dark:bg-gray-900/55 hover:border-ch-blue/50 hover:text-ch-blue transition-colors\"\n          onclick={openCommandPalette}\n          title=\"Open command palette\"\n        >\n          <Search size={14} />\n          <span class=\"flex-1 text-left\">Command Menu</span>\n          <kbd class=\"text-[10px] px-1.5 py-0.5 rounded border border-gray-300 
dark:border-gray-700 text-gray-500\">⌘K / ⌥K</kbd>\n        </button>\n      </div>\n      <div class=\"flex-1 min-h-0 flex flex-col\">\n        <div class=\"flex-1 min-h-0 overflow-auto border-b border-gray-200 dark:border-gray-800\">\n          <DatabaseTree onSelectTable={handleSelectTable} />\n        </div>\n\n        <div class=\"shrink-0 border-t border-gray-200 dark:border-gray-800\">\n          <button\n            class=\"w-full flex items-center gap-2 px-3.5 py-2 text-[12px] font-medium text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-100/70 dark:hover:bg-gray-900/60 transition-colors\"\n            onclick={toggleMenuCollapsed}\n            title={menuCollapsed ? 'Expand menu section' : 'Collapse menu section'}\n            aria-expanded={!menuCollapsed}\n          >\n            <span class=\"uppercase tracking-wide\">Menu</span>\n            <ChevronDown size={14} class=\"ml-auto transition-transform {menuCollapsed ? '' : 'rotate-180'}\" />\n          </button>\n\n          {#if !menuCollapsed}\n            <div class=\"py-1.5\">\n              {#each navItems as item}\n                {#if item.type === 'external'}\n                  <a\n                    class=\"flex items-center gap-2.5 w-full px-3.5 py-2 text-[13px] font-medium transition-colors text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40\"\n                    href={item.href}\n                    target=\"_blank\"\n                    rel=\"noopener noreferrer\"\n                  >\n                    <item.icon size={15} />\n                    <span class=\"truncate\">{item.label}</span>\n                  </a>\n                {:else}\n                  <button\n                    class=\"flex items-center gap-2.5 w-full px-3.5 py-2 text-[13px] font-medium transition-colors\n                      {isNavItemActive(item.type)\n                        ? 
'text-ch-orange bg-orange-100/70 dark:bg-orange-500/18 border-l-2 border-ch-orange'\n                        : 'text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40'}\"\n                    onclick={() => openSingletonTab(item.type, item.label)}\n                  >\n                    <item.icon size={15} />\n                    <span class=\"truncate\">{item.label}</span>\n                    {#if item.pro && !licensedPro}\n                      <span class=\"ml-auto text-[10px] uppercase tracking-wider text-ch-orange font-semibold\">Pro</span>\n                    {/if}\n                  </button>\n                {/if}\n              {/each}\n            </div>\n\n            <!-- Connection info + actions (expanded) -->\n            <div class=\"px-3.5 py-2.5 border-t border-gray-200 dark:border-gray-800 shrink-0\">\n              {#if session}\n                <div class=\"flex items-center gap-2 mb-1.5\">\n                  <Database size={14} class=\"text-ch-blue shrink-0\" />\n                  <span class=\"text-[13px] font-semibold text-gray-700 dark:text-gray-300 truncate\">{session.connectionName ?? 'CH-UI'}</span>\n                  <span\n                    class=\"w-1.5 h-1.5 rounded-full shrink-0 {session.connectionOnline ? 'bg-green-500' : 'bg-red-500'}\"\n                    title={session.connectionOnline ? 
'Connected' : 'Disconnected'}\n                  ></span>\n                  <span class=\"text-[10px] text-gray-400 truncate\">{session.user}</span>\n                </div>\n              {/if}\n              <div class=\"flex items-center gap-1\">\n                {#if session?.appVersion}\n                  <span class=\"text-[10px] text-gray-400 dark:text-gray-600 mr-auto\">ch-ui {session.appVersion}</span>\n                {/if}\n                <div class=\"flex items-center gap-0.5 ml-auto\">\n                  <button\n                    class=\"p-1 rounded text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 hover:bg-gray-200/40 dark:hover:bg-gray-800/40 transition-colors\"\n                    onclick={toggleTheme}\n                    title=\"Toggle theme\"\n                  >\n                    {#if getTheme() === 'dark'}\n                      <Sun size={14} />\n                    {:else}\n                      <Moon size={14} />\n                    {/if}\n                  </button>\n                  {#if session}\n                    <button\n                      class=\"p-1 rounded text-gray-500 dark:text-gray-400 hover:text-red-400 hover:bg-gray-200/40 dark:hover:bg-gray-800/40 transition-colors\"\n                      onclick={logout}\n                      title=\"Logout\"\n                    >\n                      <LogOut size={14} />\n                    </button>\n                  {/if}\n                  <!-- Collapse button at bottom (expanded state) -->\n                  <button\n                    class=\"p-1 rounded text-gray-500 dark:text-gray-400 hover:text-ch-blue dark:hover:text-ch-blue hover:bg-gray-200/40 dark:hover:bg-gray-800/40 transition-colors\"\n                    onclick={toggleCollapse}\n                    title=\"Collapse sidebar (⌘B)\"\n                  >\n                    <ChevronLeft size={14} />\n                  </button>\n                </div>\n              </div>\n     
       </div>\n          {/if}\n        </div>\n      </div>\n    {/if}\n  </nav>\n\n  <!-- Drag handle (horizontal width resize) -->\n  {#if !collapsed}\n    <!-- svelte-ignore a11y_no_static_element_interactions -->\n    <div\n      class=\"w-1 shrink-0 cursor-col-resize group flex items-center justify-center hover:bg-ch-blue/20 transition-colors {dragging ? 'bg-ch-blue/30' : ''}\"\n      onmousedown={onDragStart}\n    >\n      <div class=\"h-8 w-0.5 rounded-full {dragging ? 'bg-ch-blue' : 'bg-gray-300 dark:bg-gray-700 group-hover:bg-ch-blue/60'} transition-colors\"></div>\n    </div>\n  {/if}\n</div>\n\n<!-- Drag overlay -->\n{#if dragging}\n  <div class=\"fixed inset-0 z-50 cursor-col-resize\"></div>\n{/if}"
  },
  {
    "path": "ui/src/lib/components/layout/TabBar.svelte",
    "content": "<script lang=\"ts\">\n  import {\n    getGroupTabs,\n    getGroupActiveTabId,\n    setActiveTab,\n    openQueryTab,\n    closeTab,\n    renameTab,\n    reorderTab,\n    splitTab,\n    moveTabToGroup,\n    isSplit,\n    setFocusedGroup,\n    getTabs,\n    getFocusedGroupId,\n  } from '../../stores/tabs.svelte'\n  import type { QueryTab, ModelTab, Tab } from '../../stores/tabs.svelte'\n  import {\n    Plus,\n    X,\n    SquareTerminal,\n    Table2,\n    Bookmark,\n    LayoutDashboard,\n    Clock,\n    Brain,\n    Shield,\n    Settings,\n    Columns2,\n    Database,\n    CopyPlus,\n    Pencil,\n    PanelLeft,\n    PanelRight,\n    House,\n    Workflow,\n    Boxes,\n  } from 'lucide-svelte'\n  import ContextMenu, { type ContextMenuItem } from '../common/ContextMenu.svelte'\n  import ConfirmDialog from '../common/ConfirmDialog.svelte'\n\n  interface Props {\n    groupId: string\n  }\n\n  let { groupId }: Props = $props()\n\n  const tabs = $derived(getGroupTabs(groupId))\n  const activeId = $derived(getGroupActiveTabId(groupId))\n  const split = $derived(isSplit())\n  const focusedGroupId = $derived(getFocusedGroupId())\n\n  let editingTabId = $state<string | null>(null)\n  let editingName = $state('')\n\n  let dragTabId = $state<string | null>(null)\n  let dropTargetIndex = $state<number | null>(null)\n  let splitDropActive = $state(false)\n\n  let tabMenu = $state<{ tabId: string; x: number; y: number } | null>(null)\n  let pendingCloseIds = $state<string[]>([])\n  let closeConfirmOpen = $state(false)\n  let closeConfirmDescription = $state('')\n\n  const iconMap: Record<string, typeof SquareTerminal> = {\n    'home': House,\n    'query': SquareTerminal,\n    'table': Table2,\n    'database': Database,\n    'dashboard': LayoutDashboard,\n    'saved-queries': Bookmark,\n    'dashboards': LayoutDashboard,\n    'schedules': Clock,\n    'brain': Brain,\n    'admin': Shield,\n    'governance': Shield,\n    'pipelines': Workflow,\n    'model': Boxes,\n    
'settings': Settings,\n  }\n\n  function isHomeTab(tab: Tab): boolean {\n    return tab.type === 'home'\n  }\n\n  function getIcon(tab: Tab) {\n    if (tab.type === 'query' && (tab as QueryTab).savedQueryId) return Bookmark\n    return iconMap[tab.type] ?? SquareTerminal\n  }\n\n  function hideTabMenu() {\n    tabMenu = null\n  }\n\n  function openTabMenu(e: MouseEvent, tabId: string) {\n    const tab = getTabs().find((entry) => entry.id === tabId)\n    if (tab && isHomeTab(tab)) return\n    e.preventDefault()\n    e.stopPropagation()\n    tabMenu = { tabId, x: e.clientX, y: e.clientY }\n  }\n\n  function getMenuTab(): Tab | undefined {\n    if (!tabMenu) return undefined\n    return getTabs().find(t => t.id === tabMenu?.tabId)\n  }\n\n  function startRename(tab: Tab) {\n    editingTabId = tab.id\n    editingName = tab.name\n    hideTabMenu()\n  }\n\n  function commitRename() {\n    if (editingTabId && editingName.trim()) {\n      renameTab(editingTabId, editingName.trim())\n    }\n    editingTabId = null\n  }\n\n  function handleRenameKeydown(e: KeyboardEvent) {\n    if (e.key === 'Enter') commitRename()\n    if (e.key === 'Escape') editingTabId = null\n  }\n\n  function handleMiddleClick(e: MouseEvent, tabId: string) {\n    if (e.button === 1) {\n      const tab = getTabs().find((entry) => entry.id === tabId)\n      if (!tab || isHomeTab(tab)) return\n      e.preventDefault()\n      requestCloseTabs([tabId])\n    }\n  }\n\n  function handleTabClick(tabId: string) {\n    setFocusedGroup(groupId)\n    setActiveTab(tabId, groupId)\n  }\n\n  function duplicateQueryTab(tab: Tab | undefined) {\n    if (!tab || tab.type !== 'query') return\n    openQueryTab((tab as QueryTab).sql, groupId)\n    hideTabMenu()\n  }\n\n  function closeOthers(tabId: string) {\n    const otherIds = getTabs().map(t => t.id).filter(id => id !== tabId)\n    requestCloseTabs(otherIds)\n    hideTabMenu()\n  }\n\n  function closeTabsToSide(tabId: string, side: 'left' | 'right') {\n    const idx = 
tabs.findIndex(t => t.id === tabId)\n    if (idx === -1) return\n    const targets = side === 'left' ? tabs.slice(0, idx) : tabs.slice(idx + 1)\n    requestCloseTabs(targets.map((tab) => tab.id))\n    hideTabMenu()\n  }\n\n  function splitFromMenu(tabId: string) {\n    splitTab(tabId)\n    hideTabMenu()\n  }\n\n  function resetPendingClose() {\n    pendingCloseIds = []\n    closeConfirmDescription = ''\n    closeConfirmOpen = false\n  }\n\n  function requestCloseTabs(tabIds: string[]) {\n    const uniqueIds = Array.from(new Set(tabIds)).filter((id) => {\n      const tab = getTabs().find((entry) => entry.id === id)\n      return !!tab && !isHomeTab(tab)\n    })\n    if (uniqueIds.length === 0) return\n\n    const dirtyTabs = uniqueIds\n      .map((id) => getTabs().find((tab) => tab.id === id))\n      .filter((tab): tab is QueryTab | ModelTab => {\n        if (!tab) return false\n        if (tab.type === 'query') return !!(tab as QueryTab).dirty\n        if (tab.type === 'model') return !!(tab as ModelTab).dirty\n        return false\n      })\n\n    if (dirtyTabs.length === 0) {\n      uniqueIds.forEach((id) => closeTab(id))\n      return\n    }\n\n    pendingCloseIds = uniqueIds\n    if (dirtyTabs.length === 1 && uniqueIds.length === 1) {\n      closeConfirmDescription = `\"${dirtyTabs[0].name}\" has unsaved changes. Close without saving?`\n    } else {\n      closeConfirmDescription = `${dirtyTabs.length} tab(s) have unsaved changes. 
Close selected tabs anyway?`\n    }\n    closeConfirmOpen = true\n  }\n\n  function confirmCloseTabs() {\n    pendingCloseIds.forEach((id) => closeTab(id))\n    resetPendingClose()\n  }\n\n  // ── Drag-to-reorder ──────────────────────────────────────────\n  function handleDragStart(e: DragEvent, tab: Tab) {\n    if (isHomeTab(tab)) {\n      e.preventDefault()\n      return\n    }\n    if (!e.dataTransfer) return\n    dragTabId = tab.id\n    splitDropActive = false\n    e.dataTransfer.effectAllowed = 'move'\n    e.dataTransfer.setData('text/plain', tab.id)\n    e.dataTransfer.setData('application/x-tab-group', groupId)\n  }\n\n  function handleDragOver(e: DragEvent, index: number) {\n    e.preventDefault()\n    if (e.dataTransfer) e.dataTransfer.dropEffect = 'move'\n    dropTargetIndex = index\n  }\n\n  function handleDrop(e: DragEvent, toIndex: number) {\n    e.preventDefault()\n    const tabId = e.dataTransfer?.getData('text/plain')\n    const sourceGroup = e.dataTransfer?.getData('application/x-tab-group')\n\n    if (!tabId) return\n\n    if (sourceGroup && sourceGroup !== groupId) {\n      moveTabToGroup(tabId, groupId)\n    } else if (dragTabId) {\n      const fromIndex = tabs.findIndex(t => t.id === dragTabId)\n      if (fromIndex !== -1 && fromIndex !== toIndex) {\n        reorderTab(groupId, fromIndex, toIndex)\n      }\n    }\n\n    dragTabId = null\n    dropTargetIndex = null\n    splitDropActive = false\n  }\n\n  function handleContainerDrop(e: DragEvent) {\n    e.preventDefault()\n    const tabId = e.dataTransfer?.getData('text/plain')\n    const sourceGroup = e.dataTransfer?.getData('application/x-tab-group')\n    if (tabId && sourceGroup && sourceGroup !== groupId) {\n      moveTabToGroup(tabId, groupId)\n    }\n    dragTabId = null\n    dropTargetIndex = null\n  }\n\n  function handleContainerDragOver(e: DragEvent) {\n    e.preventDefault()\n    if (e.dataTransfer) e.dataTransfer.dropEffect = 'move'\n  }\n\n  function handleDragEnd() {\n    dragTabId 
= null\n    dropTargetIndex = null\n    splitDropActive = false\n  }\n\n  function handleSplit(e: MouseEvent, tabId: string) {\n    e.stopPropagation()\n    splitTab(tabId)\n  }\n\n  function handleSplitDropOver(e: DragEvent) {\n    e.preventDefault()\n    splitDropActive = true\n    if (e.dataTransfer) e.dataTransfer.dropEffect = 'move'\n  }\n\n  function handleSplitDropLeave() {\n    splitDropActive = false\n  }\n\n  function handleSplitDrop(e: DragEvent) {\n    e.preventDefault()\n    const tabId = e.dataTransfer?.getData('text/plain')\n    if (!tabId) return\n    splitTab(tabId)\n    dragTabId = null\n    dropTargetIndex = null\n    splitDropActive = false\n  }\n\n  function shouldIgnoreShortcutTarget(target: EventTarget | null): boolean {\n    if (!(target instanceof HTMLElement)) return false\n    return target.tagName === 'INPUT' ||\n      target.tagName === 'TEXTAREA' ||\n      target.tagName === 'SELECT' ||\n      target.isContentEditable\n  }\n\n  function handleGlobalTabShortcuts(e: KeyboardEvent) {\n    if (focusedGroupId !== groupId) return\n    if (editingTabId) return\n\n    const mod = e.metaKey || e.ctrlKey\n    const currentTab = tabs.find((t) => t.id === activeId)\n    if (!currentTab) return\n\n    if ((e.altKey && e.key.toLowerCase() === 'w') || (mod && e.shiftKey && e.key.toLowerCase() === 'w')) {\n      if (isHomeTab(currentTab)) return\n      e.preventDefault()\n      requestCloseTabs([currentTab.id])\n      return\n    }\n\n    if (((e.altKey && e.key.toLowerCase() === 'd') || (mod && e.shiftKey && e.key.toLowerCase() === 'd')) && currentTab.type === 'query') {\n      e.preventDefault()\n      duplicateQueryTab(currentTab)\n      return\n    }\n\n    if (e.altKey && e.key.toLowerCase() === 's') {\n      e.preventDefault()\n      splitFromMenu(currentTab.id)\n      return\n    }\n\n    if (e.key === 'F2' && !shouldIgnoreShortcutTarget(e.target)) {\n      e.preventDefault()\n      startRename(currentTab)\n    }\n  }\n\n  function 
getTabMenuItems(): ContextMenuItem[] {\n    if (!tabMenu) return []\n    const menuTab = getTabs().find(t => t.id === tabMenu?.tabId)\n    if (!menuTab) return []\n    if (isHomeTab(menuTab)) return []\n\n    const idxInGroup = tabs.findIndex(t => t.id === menuTab.id)\n    const canCloseLeft = idxInGroup > 0 && tabs.slice(0, idxInGroup).some((tab) => !isHomeTab(tab))\n    const canCloseRight = idxInGroup >= 0 && tabs.slice(idxInGroup + 1).some((tab) => !isHomeTab(tab))\n    const canCloseOthers = getTabs().filter((tab) => !isHomeTab(tab) && tab.id !== menuTab.id).length > 0\n\n    const items: ContextMenuItem[] = []\n\n    if (menuTab.type === 'query') {\n      items.push({\n        id: 'duplicate',\n        label: 'Duplicate Query',\n        icon: CopyPlus,\n        shortcut: 'Alt+D',\n        onSelect: () => duplicateQueryTab(menuTab),\n      })\n    }\n\n    items.push({\n      id: 'rename',\n      label: 'Rename',\n      icon: Pencil,\n      shortcut: 'F2',\n      onSelect: () => startRename(menuTab),\n    })\n\n    items.push({\n      id: 'split',\n      label: split ? 
'Move To Other Pane' : 'Split To New Pane',\n      icon: Columns2,\n      shortcut: 'Alt+S',\n      onSelect: () => splitFromMenu(menuTab.id),\n    })\n\n    if (canCloseLeft || canCloseRight || canCloseOthers) {\n      items.push({ id: 'sep-close', separator: true })\n    }\n\n    if (canCloseLeft) {\n      items.push({\n        id: 'close-left',\n        label: 'Close Tabs To Left',\n        icon: PanelLeft,\n        onSelect: () => closeTabsToSide(menuTab.id, 'left'),\n      })\n    }\n    if (canCloseRight) {\n      items.push({\n        id: 'close-right',\n        label: 'Close Tabs To Right',\n        icon: PanelRight,\n        onSelect: () => closeTabsToSide(menuTab.id, 'right'),\n      })\n    }\n    if (canCloseOthers) {\n      items.push({\n        id: 'close-others',\n        label: 'Close Other Tabs',\n        onSelect: () => closeOthers(menuTab.id),\n      })\n    }\n\n    items.push({ id: 'sep-end', separator: true })\n    items.push({\n      id: 'close',\n      label: 'Close',\n      icon: X,\n      shortcut: 'Alt+W',\n      danger: true,\n      onSelect: () => {\n        requestCloseTabs([menuTab.id])\n        hideTabMenu()\n      },\n    })\n\n    return items\n  }\n</script>\n\n<svelte:window\n  onkeydown={(e) => {\n    if (e.key === 'Escape') hideTabMenu()\n    handleGlobalTabShortcuts(e)\n  }}\n/>\n\n<!-- svelte-ignore a11y_no_static_element_interactions -->\n<div\n  class=\"relative flex items-center border-b border-gray-200 dark:border-gray-800 bg-white dark:bg-gray-950 overflow-x-auto shrink-0 whitespace-nowrap\"\n  ondragover={handleContainerDragOver}\n  ondrop={handleContainerDrop}\n>\n  {#each tabs as tab, i (tab.id)}\n    {@const Icon = getIcon(tab)}\n    <!-- svelte-ignore a11y_no_static_element_interactions -->\n    <div\n      class=\"group/tab flex items-center gap-2 h-9 text-[13px] border-r border-gray-200 dark:border-gray-800 shrink-0 cursor-pointer select-none\n        {isHomeTab(tab) ? 
'px-2.5 w-10 justify-center' : 'px-3.5 max-w-[260px]'}\n        {tab.id === activeId\n          ? 'bg-gray-50 dark:bg-gray-900 text-gray-800 dark:text-gray-200 border-b-2 border-b-ch-blue'\n          : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-100/50 dark:hover:bg-gray-900/50'}\n        {dragTabId === tab.id ? 'opacity-40' : ''}\n        {dropTargetIndex === i && dragTabId !== tab.id ? 'border-l-2 !border-l-ch-blue' : ''}\"\n      draggable={!isHomeTab(tab)}\n      onclick={() => handleTabClick(tab.id)}\n      oncontextmenu={(e) => openTabMenu(e, tab.id)}\n      ondblclick={() => tab.type === 'query' && !isHomeTab(tab) && startRename(tab)}\n      onmousedown={(e) => handleMiddleClick(e, tab.id)}\n      onkeydown={(e) => {\n        if (e.key === 'Enter' || e.key === ' ') {\n          e.preventDefault()\n          handleTabClick(tab.id)\n        }\n      }}\n      ondragstart={(e) => handleDragStart(e, tab)}\n      ondragover={(e) => handleDragOver(e, i)}\n      ondrop={(e) => handleDrop(e, i)}\n      ondragend={handleDragEnd}\n      role=\"tab\"\n      tabindex=\"0\"\n      aria-selected={tab.id === activeId}\n      title={tab.name}\n    >\n      <Icon size={13} class=\"shrink-0\" />\n\n      {#if editingTabId === tab.id}\n        <input\n          type=\"text\"\n          class=\"w-full bg-transparent border-b border-ch-blue text-[13px] outline-none text-gray-800 dark:text-gray-200 leading-none\"\n          bind:value={editingName}\n          onblur={commitRename}\n          onkeydown={handleRenameKeydown}\n          onclick={(e) => e.stopPropagation()}\n        />\n      {:else}\n        {#if isHomeTab(tab)}\n          <span class=\"sr-only\">{tab.name}</span>\n        {:else}\n          <span class=\"truncate leading-none\">{tab.name}</span>\n        {/if}\n      {/if}\n\n      {#if (tab.type === 'query' && tab.dirty) || (tab.type === 'model' && (tab as ModelTab).dirty)}\n        <span class=\"w-1.5 h-1.5 rounded-full bg-ch-orange 
shrink-0\"></span>\n      {/if}\n\n      {#if getTabs().length > 1 && !isHomeTab(tab)}\n        <button\n          class=\"p-0.5 rounded text-gray-400 hover:text-ch-blue hover:bg-gray-300 dark:hover:bg-gray-700 shrink-0 opacity-0 group-hover/tab:opacity-100 transition-opacity\"\n          onclick={(e) => handleSplit(e, tab.id)}\n          title=\"Split tab\"\n        >\n          <Columns2 size={11} />\n        </button>\n      {/if}\n\n      {#if !isHomeTab(tab)}\n        <button\n          class=\"ml-auto p-0.5 rounded hover:bg-gray-300 dark:hover:bg-gray-700 text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 shrink-0\"\n          onclick={(e: MouseEvent) => { e.stopPropagation(); requestCloseTabs([tab.id]) }}\n          title=\"Close tab\"\n        >\n          <X size={13} />\n        </button>\n      {/if}\n    </div>\n  {/each}\n\n  <button\n    class=\"px-2.5 h-9 text-gray-400 dark:text-gray-600 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-200/50 dark:hover:bg-gray-800/50 shrink-0\"\n    onclick={() => openQueryTab('', groupId)}\n    title=\"New query\"\n  >\n    <Plus size={15} />\n  </button>\n\n  {#if !split && dragTabId}\n    <!-- svelte-ignore a11y_no_static_element_interactions -->\n    <div\n      class=\"mx-1 my-1 px-2.5 py-1 text-[11px] rounded-md border border-dashed transition-colors shrink-0\n      {splitDropActive\n        ? 'border-ch-blue text-ch-blue bg-ch-blue/10'\n        : 'border-gray-300 dark:border-gray-700 text-gray-500 dark:text-gray-400'}\"\n      ondragover={handleSplitDropOver}\n      ondragleave={handleSplitDropLeave}\n      ondrop={handleSplitDrop}\n      title=\"Drop tab to split\"\n    >\n      Drop to Split\n    </div>\n  {/if}\n\n</div>\n\n<ContextMenu\n  open={!!tabMenu}\n  x={tabMenu?.x ?? 0}\n  y={tabMenu?.y ?? 
0}\n  items={getTabMenuItems()}\n  onclose={hideTabMenu}\n/>\n\n<ConfirmDialog\n  open={closeConfirmOpen}\n  title=\"Discard unsaved changes?\"\n  description={closeConfirmDescription}\n  confirmLabel=\"Close Tabs\"\n  destructive={true}\n  onconfirm={confirmCloseTabs}\n  oncancel={resetPendingClose}\n/>\n"
  },
  {
    "path": "ui/src/lib/components/layout/TabContent.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import { getGroupActiveTab } from '../../stores/tabs.svelte'\n  import type { QueryTab, TableTab, DatabaseTab, DashboardTab, ModelTab } from '../../stores/tabs.svelte'\n  import { loadLicense, isProActive, isLicenseLoading } from '../../stores/license.svelte'\n  import QueryContent from './content/QueryContent.svelte'\n  import TableContent from './content/TableContent.svelte'\n  import DatabaseContent from './content/DatabaseContent.svelte'\n  import ProRequired from '../common/ProRequired.svelte'\n  import SavedQueries from '../../../pages/SavedQueries.svelte'\n  import Settings from '../../../pages/Settings.svelte'\n  import Dashboards from '../../../pages/Dashboards.svelte'\n  import Schedules from '../../../pages/Schedules.svelte'\n  import BrainPage from '../../../pages/Brain.svelte'\n  import Admin from '../../../pages/Admin.svelte'\n  import Governance from '../../../pages/Governance.svelte'\n  import Pipelines from '../../../pages/Pipelines.svelte'\n  import Models from '../../../pages/Models.svelte'\n  import ModelContent from './content/ModelContent.svelte'\n  import Home from '../../../pages/Home.svelte'\n\n  interface Props {\n    groupId: string\n  }\n\n  let { groupId }: Props = $props()\n\n  const activeTab = $derived(getGroupActiveTab(groupId))\n  const proActive = $derived(isProActive())\n  const licenseLoading = $derived(isLicenseLoading())\n  const requiresPro = $derived(!!activeTab && ['schedules', 'governance'].includes(activeTab.type))\n  let licenseChecked = $state(false)\n\n  onMount(() => {\n    void loadLicense().finally(() => {\n      licenseChecked = true\n    })\n  })\n\n  $effect(() => {\n    if (requiresPro && !licenseChecked) {\n      void loadLicense().finally(() => {\n        licenseChecked = true\n      })\n    }\n  })\n\n  function proFeatureLabel(): string {\n    if (!activeTab) return 'this section'\n    switch (activeTab.type) {\n      case 
'schedules':\n        return 'Scheduled Jobs'\n      case 'governance':\n        return 'Governance'\n      default:\n        return 'this section'\n    }\n  }\n</script>\n\n<div class=\"flex-1 min-h-0 overflow-hidden\">\n  {#if !activeTab}\n    <div class=\"flex items-center justify-center h-full text-gray-400 dark:text-gray-600 text-sm\">\n      Open a query or select a table to get started\n    </div>\n  {:else if requiresPro && !proActive}\n    {#if licenseLoading || !licenseChecked}\n      <div class=\"flex items-center justify-center h-full text-gray-500 dark:text-gray-400 text-sm\">\n        Checking license...\n      </div>\n    {:else}\n      <ProRequired feature={proFeatureLabel()} />\n    {/if}\n  {:else if activeTab.type === 'query'}\n    {#key activeTab.id}\n      <QueryContent tab={activeTab as QueryTab} />\n    {/key}\n  {:else if activeTab.type === 'table'}\n    {#key activeTab.id}\n      <TableContent tab={activeTab as TableTab} />\n    {/key}\n  {:else if activeTab.type === 'database'}\n    {#key activeTab.id}\n      <DatabaseContent tab={activeTab as DatabaseTab} />\n    {/key}\n  {:else if activeTab.type === 'saved-queries'}\n    <SavedQueries />\n  {:else if activeTab.type === 'settings'}\n    <Settings />\n  {:else if activeTab.type === 'dashboards'}\n    <Dashboards />\n  {:else if activeTab.type === 'dashboard'}\n    {#key activeTab.id}\n      <Dashboards dashboardId={(activeTab as DashboardTab).dashboardId} />\n    {/key}\n  {:else if activeTab.type === 'schedules'}\n    <Schedules />\n  {:else if activeTab.type === 'brain'}\n    <BrainPage />\n  {:else if activeTab.type === 'admin'}\n    <Admin />\n  {:else if activeTab.type === 'governance'}\n    <Governance />\n  {:else if activeTab.type === 'pipelines'}\n    <Pipelines />\n  {:else if activeTab.type === 'model'}\n    {#key activeTab.id}\n      <ModelContent tab={activeTab as ModelTab} />\n    {/key}\n  {:else if activeTab.type === 'models'}\n    <Models />\n  {:else if activeTab.type 
=== 'home'}\n    <Home />\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/layout/TabGroup.svelte",
    "content": "<script lang=\"ts\">\n  import TabBar from './TabBar.svelte'\n  import TabContent from './TabContent.svelte'\n  import { getFocusedGroupId } from '../../stores/tabs.svelte'\n\n  interface Props {\n    groupId: string\n  }\n\n  let { groupId }: Props = $props()\n\n  const isFocused = $derived(getFocusedGroupId() === groupId)\n</script>\n\n<div class=\"flex flex-col flex-1 min-w-0 min-h-0 {isFocused ? 'border-t-2 border-t-ch-blue' : 'border-t-2 border-t-transparent'}\">\n  <TabBar {groupId} />\n  <TabContent {groupId} />\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/layout/content/DatabaseContent.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { DatabaseTab } from '../../../stores/tabs.svelte'\n  import type { ColumnMeta } from '../../../types/query'\n  import { fetchDatabaseInfo, fetchDatabaseTables } from '../../../api/query'\n  import { formatBytes, formatNumber } from '../../../utils/format'\n  import Spinner from '../../common/Spinner.svelte'\n  import VirtualTable from '../../table/VirtualTable.svelte'\n  import { Database, HardDrive, Rows3, Table2, Clock, RefreshCw, FolderOpen } from 'lucide-svelte'\n\n  interface Props {\n    tab: DatabaseTab\n  }\n\n  let { tab }: Props = $props()\n\n  type SubTab = 'overview' | 'tables'\n  let activeSubTab = $state<SubTab>('overview')\n\n  let dbInfo = $state<Record<string, any>>({})\n  let infoLoading = $state(true)\n  let infoError = $state<string | null>(null)\n\n  let tablesMeta = $state<ColumnMeta[]>([])\n  let tablesData = $state<unknown[][]>([])\n  let tablesLoading = $state(false)\n  let tablesError = $state<string | null>(null)\n  let tablesLoaded = $state(false)\n\n  const subTabs: { id: SubTab; label: string }[] = [\n    { id: 'overview', label: 'Overview' },\n    { id: 'tables', label: 'Tables' },\n  ]\n\n  const metrics = $derived.by(() => {\n    if (!dbInfo || Object.keys(dbInfo).length === 0) return []\n    return [\n      { label: 'Tables', value: formatNumber(Number(dbInfo.table_count ?? 0)), icon: Table2, color: 'text-ch-orange' },\n      { label: 'Rows', value: formatNumber(Number(dbInfo.total_rows ?? 0)), icon: Rows3, color: 'text-ch-orange' },\n      { label: 'Total Size', value: formatBytes(Number(dbInfo.total_bytes ?? 0)), icon: HardDrive, color: 'text-ch-green' },\n      { label: 'Engine', value: dbInfo.engine ?? 
'—', icon: Database, color: 'text-gray-500' },\n    ]\n  })\n\n  function formatDate(value: unknown): string {\n    if (!value) return '—'\n    const date = new Date(String(value))\n    if (Number.isNaN(date.getTime())) return String(value)\n    return date.toLocaleString()\n  }\n\n  async function loadInfo() {\n    infoLoading = true\n    infoError = null\n    try {\n      dbInfo = await fetchDatabaseInfo(tab.database)\n    } catch (e: any) {\n      infoError = e.message\n    } finally {\n      infoLoading = false\n    }\n  }\n\n  async function loadTables() {\n    if (tablesLoaded) return\n    tablesLoading = true\n    tablesError = null\n    try {\n      const res = await fetchDatabaseTables(tab.database)\n      tablesMeta = res.meta ?? []\n      tablesData = (res.data ?? []).map((row: any) => {\n        if (Array.isArray(row)) return row\n        return (res.meta ?? []).map((col: any) => row[col.name])\n      })\n      tablesLoaded = true\n    } catch (e: any) {\n      tablesError = e.message\n    } finally {\n      tablesLoading = false\n    }\n  }\n\n  function switchTab(next: SubTab) {\n    activeSubTab = next\n    if (next === 'tables') loadTables()\n  }\n\n  function refresh() {\n    tablesLoaded = false\n    if (activeSubTab === 'tables') loadTables()\n    loadInfo()\n  }\n\n  onMount(() => {\n    loadInfo()\n  })\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <div class=\"flex items-center gap-3 px-4 py-3 border-b border-gray-200 dark:border-gray-800 bg-gray-100/40 dark:bg-gray-900/45 shrink-0\">\n    <Database size={16} class=\"text-ch-orange shrink-0\" />\n    <div class=\"min-w-0\">\n      <span class=\"text-sm font-medium text-gray-800 dark:text-gray-200 truncate\">{tab.database}</span>\n    </div>\n    <button\n      class=\"ml-1 p-1 rounded hover:bg-gray-200 dark:hover:bg-gray-800 transition-colors text-gray-400 hover:text-gray-600 dark:hover:text-gray-300\"\n      onclick={refresh}\n      title=\"Refresh database info\"\n    >\n      
<RefreshCw size={13} class={infoLoading || tablesLoading ? 'animate-spin' : ''} />\n    </button>\n  </div>\n\n  <div class=\"flex items-center gap-1 px-3 py-1.5 border-b border-gray-200 dark:border-gray-800 bg-white dark:bg-gray-950 shrink-0\">\n    {#each subTabs as st}\n      <button\n        class=\"px-3 py-1.5 text-xs rounded-md transition-colors whitespace-nowrap {activeSubTab === st.id\n          ? 'bg-gray-200 dark:bg-gray-800 text-gray-800 dark:text-gray-200 font-medium'\n          : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-200/50 dark:hover:bg-gray-800/50'}\"\n        onclick={() => switchTab(st.id)}\n      >{st.label}</button>\n    {/each}\n  </div>\n\n  <div class=\"flex-1 min-h-0 overflow-auto\">\n    {#if activeSubTab === 'overview'}\n      {#if infoLoading}\n        <div class=\"flex items-center justify-center py-12 gap-2\">\n          <Spinner size=\"sm\" />\n          <span class=\"text-sm text-gray-500\">Loading database info...</span>\n        </div>\n      {:else if infoError}\n        <div class=\"p-4\">\n          <div class=\"bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3 text-sm text-red-700 dark:text-red-300\">{infoError}</div>\n        </div>\n      {:else}\n        <div class=\"p-4 space-y-4\">\n          <div class=\"grid grid-cols-2 xl:grid-cols-4 gap-3\">\n            {#each metrics as m}\n              {@const Icon = m.icon}\n              <div class=\"bg-gray-50 dark:bg-gray-900 border border-gray-200 dark:border-gray-800 rounded-xl p-4\">\n                <div class=\"flex items-center gap-2 mb-2\">\n                  <Icon size={14} class={m.color} />\n                  <span class=\"text-xs text-gray-500 uppercase tracking-wider\">{m.label}</span>\n                </div>\n                <div class=\"text-xl font-semibold text-gray-800 dark:text-gray-200 truncate\">{m.value}</div>\n              </div>\n            {/each}\n          </div>\n\n      
    <div class=\"bg-gray-50 dark:bg-gray-900 border border-gray-200 dark:border-gray-800 rounded-xl p-4\">\n            <h3 class=\"text-xs text-gray-500 uppercase tracking-wider mb-3\">Details</h3>\n            <div class=\"grid grid-cols-[140px_1fr] gap-y-2 gap-x-3 text-sm\">\n              <span class=\"text-gray-500\">Name</span>\n              <code class=\"font-mono text-gray-800 dark:text-gray-200 break-all\">{dbInfo.name ?? tab.database}</code>\n\n              <span class=\"text-gray-500\">Engine</span>\n              <span class=\"text-gray-800 dark:text-gray-200\">{dbInfo.engine ?? '—'}</span>\n\n              <span class=\"text-gray-500\">Data Path</span>\n              <span class=\"flex items-center gap-2 text-gray-800 dark:text-gray-200 break-all\">\n                <FolderOpen size={13} class=\"text-gray-500 shrink-0\" />\n                <code class=\"font-mono text-xs\">{dbInfo.data_path ?? '—'}</code>\n              </span>\n\n              <span class=\"text-gray-500\">Metadata Path</span>\n              <code class=\"font-mono text-xs text-gray-800 dark:text-gray-200 break-all\">{dbInfo.metadata_path ?? 
'—'}</code>\n\n              <span class=\"text-gray-500\">Last Modified</span>\n              <span class=\"flex items-center gap-2 text-gray-800 dark:text-gray-200\">\n                <Clock size={13} class=\"text-gray-500 shrink-0\" />\n                {formatDate(dbInfo.last_modified)}\n              </span>\n            </div>\n          </div>\n        </div>\n      {/if}\n    {:else if activeSubTab === 'tables'}\n      {#if tablesLoading}\n        <div class=\"flex items-center justify-center py-12 gap-2\">\n          <Spinner size=\"sm\" />\n          <span class=\"text-sm text-gray-500\">Loading tables...</span>\n        </div>\n      {:else if tablesError}\n        <div class=\"p-4\">\n          <div class=\"bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3 text-sm text-red-700 dark:text-red-300\">{tablesError}</div>\n        </div>\n      {:else if tablesMeta.length > 0}\n        <VirtualTable meta={tablesMeta} data={tablesData} />\n      {:else}\n        <div class=\"flex items-center justify-center py-12 text-gray-400 dark:text-gray-600 text-sm\">No tables found</div>\n      {/if}\n    {/if}\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/layout/content/ModelContent.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { ModelTab } from '../../../stores/tabs.svelte'\n  import { updateModelTabEdit, markModelTabSaved, updateModelTabStatus } from '../../../stores/tabs.svelte'\n  import type { ModelRunResult } from '../../../types/models'\n  import * as api from '../../../api/models'\n  import { refreshModelCache } from '../../../editor/completions'\n  import { success as toastSuccess, error as toastError } from '../../../stores/toast.svelte'\n  import SqlEditor from '../../editor/SqlEditor.svelte'\n  import {\n    Play,\n    Save,\n    Eye,\n    Table2,\n    FileText,\n    AlertCircle,\n    CheckCircle,\n    XCircle,\n    Clock,\n    Loader2,\n    Code,\n  } from 'lucide-svelte'\n\n  interface Props {\n    tab: ModelTab\n  }\n\n  let { tab }: Props = $props()\n\n  let sqlEditor = $state<SqlEditor | undefined>(undefined)\n  let saving = $state(false)\n  let running = $state(false)\n  let showDescription = $state(false)\n\n  // Split panel\n  const savedSplit = parseFloat(localStorage.getItem('ch-ui-model-split-percent') ?? '60')\n  let splitPercent = $state(isNaN(savedSplit) ? 60 : savedSplit)\n  let dragging = $state(false)\n  let containerEl: HTMLDivElement\n\n  // Run output\n  let runResult = $state<ModelRunResult | null>(null)\n  let runLoading = $state(false)\n\n  onMount(() => {\n    showDescription = !!tab.edit.description\n    loadLatestRun()\n  })\n\n  async function loadLatestRun() {\n    try {\n      const res = await api.listModelRuns(5, 0)\n      const runs = res.runs ?? []\n      for (const run of runs) {\n        const detail = await api.getModelRun(run.id)\n        const result = (detail.results ?? 
[]).find(r => r.model_id === tab.modelId)\n        if (result) {\n          runResult = result\n          return\n        }\n      }\n    } catch {\n      // no run to show\n    }\n  }\n\n  function handleSQLChange(sql: string) {\n    updateModelTabEdit(tab.id, { sqlBody: sql })\n  }\n\n  async function handleSave() {\n    saving = true\n    try {\n      const sqlValue = sqlEditor?.getValue() ?? tab.edit.sqlBody\n      const res = await api.updateModel(tab.modelId, {\n        name: tab.edit.modelName,\n        description: tab.edit.description,\n        target_database: tab.edit.targetDatabase,\n        materialization: tab.edit.materialization as 'view' | 'table',\n        sql_body: sqlValue,\n        table_engine: tab.edit.tableEngine,\n        order_by: tab.edit.orderBy,\n      })\n      refreshModelCache()\n      markModelTabSaved(tab.id, {\n        name: res.model.name,\n        status: res.model.status,\n        last_error: res.model.last_error,\n      })\n      toastSuccess('Model saved')\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to save model')\n    } finally {\n      saving = false\n    }\n  }\n\n  async function handleRun() {\n    running = true\n    runResult = null\n    try {\n      const res = await api.runSingleModel(tab.modelId)\n      toastSuccess('Model run started')\n\n      // Fetch the run result\n      if (res.run_id) {\n        runLoading = true\n        // Poll briefly for result completion\n        let attempts = 0\n        while (attempts < 10) {\n          await new Promise(r => setTimeout(r, 1000))\n          try {\n            const detail = await api.getModelRun(res.run_id)\n            const result = (detail.results ?? 
[]).find(r => r.model_id === tab.modelId)\n            if (result && result.status !== 'pending' && result.status !== 'running') {\n              runResult = result\n              // Update tab status\n              const modelRes = await api.getModel(tab.modelId)\n              if (modelRes.model) {\n                updateModelTabStatus(tab.id, modelRes.model.status, modelRes.model.last_error)\n              }\n              break\n            }\n          } catch {\n            break\n          }\n          attempts++\n        }\n        runLoading = false\n      }\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to run model')\n    } finally {\n      running = false\n    }\n  }\n\n  function statusBadgeClass(status: string): string {\n    switch (status) {\n      case 'success': return 'bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400'\n      case 'error': return 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400'\n      case 'running': return 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400'\n      default: return 'bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400'\n    }\n  }\n\n  function statusDot(status: string): string {\n    switch (status) {\n      case 'success': return 'bg-green-500'\n      case 'error': return 'bg-red-500'\n      default: return 'bg-gray-400'\n    }\n  }\n\n  // ── Drag handle logic ──\n  function onDragStart(e: MouseEvent) {\n    e.preventDefault()\n    dragging = true\n    document.addEventListener('mousemove', onDragMove)\n    document.addEventListener('mouseup', onDragEnd)\n  }\n\n  function onDragMove(e: MouseEvent) {\n    if (!containerEl) return\n    const rect = containerEl.getBoundingClientRect()\n    const y = e.clientY - rect.top\n    const pct = (y / rect.height) * 100\n    splitPercent = Math.max(20, Math.min(85, pct))\n  }\n\n  function onDragEnd() {\n    dragging = false\n    document.removeEventListener('mousemove', onDragMove)\n   
 document.removeEventListener('mouseup', onDragEnd)\n    localStorage.setItem('ch-ui-model-split-percent', String(splitPercent))\n  }\n</script>\n\n<div class=\"flex flex-col h-full overflow-hidden\" bind:this={containerEl}>\n  <!-- Config toolbar -->\n  <div class=\"flex items-center gap-2 px-3 py-2 border-b border-gray-200 dark:border-gray-700 bg-gray-50 dark:bg-gray-800/50 shrink-0 flex-wrap\">\n    <input\n      type=\"text\"\n      value={tab.edit.modelName}\n      oninput={(e) => updateModelTabEdit(tab.id, { modelName: (e.target as HTMLInputElement).value })}\n      class=\"text-sm font-semibold bg-transparent border-0 border-b border-transparent hover:border-gray-300 dark:hover:border-gray-600 focus:border-orange-400 focus:outline-none text-gray-800 dark:text-gray-200 px-1 py-0.5 min-w-[120px] max-w-[200px]\"\n      placeholder=\"model_name\"\n    />\n    <span class=\"text-gray-300 dark:text-gray-600\">|</span>\n    <label for=\"model-target-db-{tab.id}\" class=\"text-[10px] text-gray-400 uppercase tracking-wide\">db</label>\n    <input\n      id=\"model-target-db-{tab.id}\"\n      type=\"text\"\n      value={tab.edit.targetDatabase}\n      oninput={(e) => updateModelTabEdit(tab.id, { targetDatabase: (e.target as HTMLInputElement).value })}\n      class=\"text-xs bg-transparent border border-gray-300 dark:border-gray-600 rounded px-1.5 py-0.5 w-24 text-gray-700 dark:text-gray-300 focus:border-orange-400 focus:outline-none\"\n    />\n    <span class=\"text-gray-300 dark:text-gray-600\">|</span>\n    <!-- Materialization toggle -->\n    <div class=\"flex rounded border border-gray-300 dark:border-gray-600 overflow-hidden\">\n      <button\n        onclick={() => updateModelTabEdit(tab.id, { materialization: 'view' })}\n        class=\"flex items-center gap-1 text-[10px] px-2 py-0.5 transition-colors\n          {tab.edit.materialization === 'view'\n            ? 
'bg-orange-100 dark:bg-orange-900/30 text-orange-700 dark:text-orange-400'\n            : 'text-gray-500 hover:bg-gray-100 dark:hover:bg-gray-800'}\"\n      >\n        <Eye size={11} /> View\n      </button>\n      <button\n        onclick={() => updateModelTabEdit(tab.id, { materialization: 'table' })}\n        class=\"flex items-center gap-1 text-[10px] px-2 py-0.5 border-l border-gray-300 dark:border-gray-600 transition-colors\n          {tab.edit.materialization === 'table'\n            ? 'bg-orange-100 dark:bg-orange-900/30 text-orange-700 dark:text-orange-400'\n            : 'text-gray-500 hover:bg-gray-100 dark:hover:bg-gray-800'}\"\n      >\n        <Table2 size={11} /> Table\n      </button>\n    </div>\n\n    {#if tab.edit.materialization === 'table'}\n      <span class=\"text-gray-300 dark:text-gray-600\">|</span>\n      <select\n        value={tab.edit.tableEngine}\n        onchange={(e) => updateModelTabEdit(tab.id, { tableEngine: (e.target as HTMLSelectElement).value })}\n        class=\"text-[10px] bg-transparent border border-gray-300 dark:border-gray-600 rounded px-1 py-0.5 text-gray-700 dark:text-gray-300 focus:outline-none\"\n      >\n        <option value=\"MergeTree\">MergeTree</option>\n        <option value=\"ReplacingMergeTree\">ReplacingMergeTree</option>\n        <option value=\"SummingMergeTree\">SummingMergeTree</option>\n        <option value=\"AggregatingMergeTree\">AggregatingMergeTree</option>\n        <option value=\"Memory\">Memory</option>\n      </select>\n      <input\n        type=\"text\"\n        value={tab.edit.orderBy}\n        oninput={(e) => updateModelTabEdit(tab.id, { orderBy: (e.target as HTMLInputElement).value })}\n        placeholder=\"ORDER BY\"\n        class=\"text-[10px] bg-transparent border border-gray-300 dark:border-gray-600 rounded px-1.5 py-0.5 w-24 text-gray-700 dark:text-gray-300 focus:border-orange-400 focus:outline-none\"\n      />\n    {/if}\n\n    <div class=\"flex-1\"></div>\n\n    <button\n      
onclick={() => { showDescription = !showDescription }}\n      class=\"text-[10px] text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 flex items-center gap-0.5 transition-colors\"\n      title=\"Toggle description\"\n    >\n      <FileText size={11} />\n    </button>\n    <span class=\"w-1.5 h-1.5 rounded-full {statusDot(tab.status)}\" title={tab.status}></span>\n    <button\n      onclick={handleRun}\n      disabled={running}\n      class=\"flex items-center gap-1 text-[10px] px-2 py-1 rounded text-gray-500 hover:text-green-600 hover:bg-green-50 dark:hover:bg-green-900/20 disabled:opacity-40 transition-colors\"\n      title=\"Run this model\"\n    >\n      <Play size={12} /> {running ? 'Running...' : 'Run'}\n    </button>\n    <button\n      onclick={handleSave}\n      disabled={saving}\n      class=\"flex items-center gap-1 text-[10px] px-2.5 py-1 rounded bg-orange-500 text-white hover:bg-orange-600 disabled:opacity-50 transition-colors font-medium\"\n    >\n      <Save size={12} /> {saving ? 'Saving...' 
: 'Save'}\n    </button>\n  </div>\n\n  <!-- Description (collapsible) -->\n  {#if showDescription}\n    <div class=\"px-3 py-1.5 border-b border-gray-200 dark:border-gray-700 bg-gray-50/50 dark:bg-gray-800/30 shrink-0\">\n      <textarea\n        value={tab.edit.description}\n        oninput={(e) => updateModelTabEdit(tab.id, { description: (e.target as HTMLTextAreaElement).value })}\n        rows={2}\n        placeholder=\"Model description (optional)\"\n        class=\"w-full text-xs bg-transparent border-0 focus:outline-none text-gray-600 dark:text-gray-400 resize-none placeholder:text-gray-300 dark:placeholder:text-gray-600\"\n      ></textarea>\n    </div>\n  {/if}\n\n  <!-- Info hint -->\n  <div class=\"px-3 py-1 border-b border-gray-200 dark:border-gray-700 bg-gray-50/50 dark:bg-gray-800/30 shrink-0\">\n    <div class=\"flex items-center gap-2 text-[10px] text-gray-400 dark:text-gray-500\">\n      <span>Use <code class=\"px-1 py-0.5 rounded bg-gray-200 dark:bg-gray-700 font-mono\">$ref(model_name)</code> to reference other models</span>\n      <span class=\"opacity-40\">|</span>\n      <span>View = computed on read, Table = snapshot on run</span>\n    </div>\n  </div>\n\n  <!-- Editor pane -->\n  <div class=\"flex flex-col min-h-[80px] overflow-hidden\" style=\"height: {splitPercent}%\">\n    <div class=\"flex-1 min-h-0\">\n      <SqlEditor\n        bind:this={sqlEditor}\n        value={tab.edit.sqlBody}\n        onchange={handleSQLChange}\n      />\n    </div>\n  </div>\n\n  <!-- Drag handle -->\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div\n    class=\"h-1 shrink-0 cursor-row-resize group flex items-center justify-center hover:bg-ch-blue/20 transition-colors {dragging ? 'bg-ch-blue/30' : 'bg-gray-200 dark:bg-gray-800'}\"\n    onmousedown={onDragStart}\n  >\n    <div class=\"w-8 h-0.5 rounded-full {dragging ? 
'bg-ch-blue' : 'bg-gray-600 group-hover:bg-ch-blue/60'} transition-colors\"></div>\n  </div>\n\n  <!-- Run Output Panel -->\n  <div class=\"flex-1 min-h-[60px] overflow-auto bg-gray-50 dark:bg-gray-900/50\">\n    {#if running || runLoading}\n      <div class=\"flex items-center justify-center h-full gap-2 text-gray-400 text-sm\">\n        <Loader2 size={16} class=\"animate-spin\" />\n        <span>Running model...</span>\n      </div>\n    {:else if runResult}\n      <div class=\"p-3 space-y-3\">\n        <!-- Status header -->\n        <div class=\"flex items-center gap-3\">\n          <span class=\"shrink-0\">\n            {#if runResult.status === 'success'}\n              <CheckCircle size={16} class=\"text-green-500\" />\n            {:else if runResult.status === 'error'}\n              <XCircle size={16} class=\"text-red-500\" />\n            {:else}\n              <Clock size={16} class=\"text-blue-400\" />\n            {/if}\n          </span>\n          <span class=\"text-[10px] px-2 py-0.5 rounded-full {statusBadgeClass(runResult.status)} font-medium uppercase tracking-wide\">\n            {runResult.status}\n          </span>\n          <span class=\"text-xs text-gray-500 dark:text-gray-400\">{runResult.elapsed_ms}ms</span>\n          {#if runResult.finished_at}\n            <span class=\"text-[10px] text-gray-400 ml-auto\">{new Date(runResult.finished_at).toLocaleString()}</span>\n          {/if}\n        </div>\n\n        <!-- Error message -->\n        {#if runResult.error}\n          <div class=\"flex items-start gap-2 px-3 py-2 rounded bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800\">\n            <AlertCircle size={14} class=\"text-red-500 shrink-0 mt-0.5\" />\n            <pre class=\"text-xs text-red-600 dark:text-red-400 whitespace-pre-wrap break-all flex-1\">{runResult.error}</pre>\n          </div>\n        {/if}\n\n        <!-- Resolved SQL -->\n        {#if runResult.resolved_sql}\n          <div 
class=\"space-y-1\">\n            <div class=\"flex items-center gap-1.5 text-[10px] text-gray-400 uppercase tracking-wide font-medium\">\n              <Code size={11} />\n              <span>Resolved SQL</span>\n            </div>\n            <pre class=\"text-xs font-mono text-gray-600 dark:text-gray-400 bg-gray-100 dark:bg-gray-800 rounded p-3 overflow-auto max-h-48 whitespace-pre-wrap break-all\">{runResult.resolved_sql}</pre>\n          </div>\n        {/if}\n      </div>\n    {:else}\n      <div class=\"flex items-center justify-center h-full text-gray-400 dark:text-gray-500 text-sm\">\n        Run this model to see results\n      </div>\n    {/if}\n  </div>\n</div>\n\n<!-- Drag overlay -->\n{#if dragging}\n  <div class=\"fixed inset-0 z-50 cursor-row-resize\"></div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/layout/content/QueryContent.svelte",
    "content": "<script lang=\"ts\">\n  import type { QueryTab } from '../../../stores/tabs.svelte'\n  import { updateTabSQL, getTabResult, setTabResult, markQueryTabSaved } from '../../../stores/tabs.svelte'\n  import { formatSQL, explainQuery, fetchQueryPlan, runSampleQuery, fetchQueryProfile, estimateQuery } from '../../../api/query'\n  import type { QueryPlanNode, QueryEstimateResult } from '../../../types/query'\n  import type { SavedQuery } from '../../../types/api'\n  import { executeStreamQuery } from '../../../api/stream'\n  import { apiPost, apiPut } from '../../../api/client'\n  import { getMaxResultRows } from '../../../stores/query-limit.svelte'\n  import { error as toastError, success as toastSuccess } from '../../../stores/toast.svelte'\n  import SqlEditor from '../../editor/SqlEditor.svelte'\n  import Toolbar from '../../editor/Toolbar.svelte'\n  import ResultPanel from '../../editor/ResultPanel.svelte'\n\n  interface Props {\n    tab: QueryTab\n  }\n\n  let { tab }: Props = $props()\n\n  let editorComponent: SqlEditor\n  const savedSplit = parseFloat(localStorage.getItem('ch-ui-split-percent') ?? '40')\n  let splitPercent = $state(isNaN(savedSplit) ? 
40 : savedSplit)\n  let dragging = $state(false)\n  let containerEl: HTMLDivElement\n  let abortController: AbortController | null = null\n\n  // Stream telemetry for the viewer\n  let streamRows = $state(0)\n  let streamChunks = $state(0)\n  let streamStartedAt = $state<number | null>(null)\n  let streamLastChunkAt = $state<number | null>(null)\n\n  // Query plan state\n  let planNodes = $state<QueryPlanNode[]>([])\n  let planLines = $state<string[]>([])\n  let planSource = $state('')\n  let planLoading = $state(false)\n  let planError = $state<string | null>(null)\n\n  // Inline profile state\n  let profile = $state<Record<string, unknown> | null>(null)\n  let profileAvailable = $state(false)\n  let profileReason = $state<string | null>(null)\n  let profileLoading = $state(false)\n  let profileError = $state<string | null>(null)\n\n  // Sampling mode from last sample action\n  let samplingMode = $state<string | null>(null)\n\n  // Query cost estimate state\n  let estimate = $state<QueryEstimateResult | null>(null)\n  let estimateLoading = $state(false)\n  let estimateTimer: ReturnType<typeof setTimeout> | null = null\n  let lastEstimatedSQL = ''\n\n  // Save modal state\n  let showSaveModal = $state(false)\n  let saveName = $state('')\n  let saveDescription = $state('')\n  let saving = $state(false)\n\n  const result = $derived(getTabResult(tab.id))\n\n  function handleSQLChange(sql: string) {\n    updateTabSQL(tab.id, sql)\n    debouncedEstimate(sql)\n  }\n\n  function debouncedEstimate(sql: string) {\n    if (estimateTimer) clearTimeout(estimateTimer)\n    const trimmed = sql.trim()\n    if (!trimmed || trimmed === lastEstimatedSQL) return\n    // Only estimate SELECT/WITH queries\n    const upper = trimmed.toUpperCase()\n    if (!upper.startsWith('SELECT') && !upper.startsWith('WITH')) {\n      estimate = null\n      return\n    }\n    estimateTimer = setTimeout(() => void runEstimate(trimmed), 1500)\n  }\n\n  async function runEstimate(sql: string) {\n    
lastEstimatedSQL = sql\n    estimateLoading = true\n    try {\n      estimate = await estimateQuery(sql)\n    } catch {\n      estimate = null\n    } finally {\n      estimateLoading = false\n    }\n  }\n\n  function toPositionalRows(meta: Array<{ name: string }>, rows: any[]): unknown[][] {\n    if (!Array.isArray(rows)) return []\n    return rows.map((row: any) => {\n      if (Array.isArray(row)) return row\n      return meta.map((col) => row[col.name])\n    })\n  }\n\n  async function handleRun(sql?: string) {\n    const query = sql ?? editorComponent?.getSelectedOrAll() ?? ''\n    if (!query.trim()) return\n\n    // Cancel any in-flight query\n    if (abortController) abortController.abort()\n    abortController = new AbortController()\n\n    const maxResultRows = getMaxResultRows()\n    const startTime = performance.now()\n\n    // Reset telemetry and inline profile for this run\n    streamRows = 0\n    streamChunks = 0\n    streamStartedAt = Date.now()\n    streamLastChunkAt = null\n    profile = null\n    profileAvailable = false\n    profileReason = null\n    profileError = null\n    samplingMode = null\n\n    // Row buffer for progressive accumulation\n    let rowBuffer: unknown[][] = []\n    let rafId: number | null = null\n\n    setTabResult(tab.id, { running: true, error: null, meta: [], data: [], stats: null, elapsedMs: 0 })\n\n    try {\n      await executeStreamQuery(\n        query,\n        maxResultRows,\n        (meta) => {\n          setTabResult(tab.id, { meta, running: true })\n        },\n        (rows, seq) => {\n          rowBuffer.push(...rows)\n          streamRows = rowBuffer.length\n          streamChunks = seq + 1\n          streamLastChunkAt = Date.now()\n          // Batch UI updates to animation frame\n          if (!rafId) {\n            rafId = requestAnimationFrame(() => {\n              setTabResult(tab.id, { data: rowBuffer, elapsedMs: Math.round(performance.now() - startTime) })\n              rafId = null\n            })\n    
      }\n        },\n        (stats, totalRows) => {\n          // Final flush\n          if (rafId) { cancelAnimationFrame(rafId); rafId = null }\n          streamRows = totalRows || rowBuffer.length\n          streamLastChunkAt = Date.now()\n          setTabResult(tab.id, {\n            data: rowBuffer,\n            stats: stats ?? null,\n            elapsedMs: Math.round(performance.now() - startTime),\n            running: false,\n          })\n          void loadInlineProfile(query)\n        },\n        (error) => {\n          if (rafId) { cancelAnimationFrame(rafId); rafId = null }\n          streamLastChunkAt = Date.now()\n          setTabResult(tab.id, { error, running: false, elapsedMs: Math.round(performance.now() - startTime) })\n        },\n        abortController.signal,\n      )\n    } catch (e: any) {\n      // AbortError is expected on cancel\n      if (e.name !== 'AbortError') {\n        streamLastChunkAt = Date.now()\n        setTabResult(tab.id, { error: e.message, running: false })\n      }\n    }\n  }\n\n  function handleCancel() {\n    if (abortController) {\n      abortController.abort()\n      abortController = null\n      setTabResult(tab.id, { running: false })\n    }\n  }\n\n  async function handleFormat() {\n    const sql = editorComponent?.getValue() ?? ''\n    if (!sql.trim()) return\n    try {\n      const formatted = await formatSQL(sql)\n      editorComponent?.setValue(formatted)\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function handleExplain() {\n    const sql = editorComponent?.getValue() ?? ''\n    if (!sql.trim()) return\n\n    profile = null\n    profileAvailable = false\n    profileReason = null\n    profileError = null\n    samplingMode = null\n\n    setTabResult(tab.id, { running: true, error: null, meta: [], data: [], stats: null })\n\n    try {\n      const res = await explainQuery(sql)\n\n      setTabResult(tab.id, {\n        meta: res.meta ?? 
[],\n        data: toPositionalRows(res.meta ?? [], res.data ?? []),\n        stats: res.statistics ?? null,\n        elapsedMs: res.elapsed_ms ?? 0,\n        error: null,\n        running: false,\n      })\n    } catch (e: any) {\n      setTabResult(tab.id, { error: e.message, running: false })\n    }\n  }\n\n  async function handleLoadPlan() {\n    const sql = editorComponent?.getValue() ?? ''\n    if (!sql.trim()) return\n\n    planLoading = true\n    planError = null\n    try {\n      const res = await fetchQueryPlan(sql)\n      planNodes = res.nodes ?? []\n      planLines = res.lines ?? []\n      planSource = res.source ?? ''\n      if (!planNodes.length && !planLines.length) {\n        planError = 'No plan rows returned by ClickHouse'\n      }\n    } catch (e: any) {\n      planError = e.message\n    } finally {\n      planLoading = false\n    }\n  }\n\n  async function handleSample(perShard: number) {\n    const sql = editorComponent?.getValue() ?? ''\n    if (!sql.trim()) return\n\n    setTabResult(tab.id, { running: true, error: null, meta: [], data: [], stats: null })\n    samplingMode = null\n\n    try {\n      const res = await runSampleQuery({ query: sql, per_shard: perShard, shard_by: '_shard_num' })\n      const rows = toPositionalRows(res.meta ?? [], (res.data as any[]) ?? [])\n      setTabResult(tab.id, {\n        meta: res.meta ?? [],\n        data: rows,\n        stats: res.statistics ?? null,\n        elapsedMs: res.elapsed_ms ?? 0,\n        error: null,\n        running: false,\n      })\n      samplingMode = res.sampling_mode ?? 'global'\n      streamRows = rows.length\n      streamChunks = rows.length > 0 ? 
1 : 0\n      streamStartedAt = Date.now()\n      streamLastChunkAt = Date.now()\n\n      if (res.warning) toastSuccess(res.warning)\n    } catch (e: any) {\n      setTabResult(tab.id, { error: e.message, running: false })\n    }\n  }\n\n  async function loadInlineProfile(executedQuery: string) {\n    profileLoading = true\n    profileError = null\n    profileReason = null\n    try {\n      const res = await fetchQueryProfile(executedQuery)\n      profileAvailable = !!res.available\n      profile = res.profile ?? null\n      profileReason = res.reason ?? null\n    } catch (e: any) {\n      profileAvailable = false\n      profile = null\n      profileError = e.message\n    } finally {\n      profileLoading = false\n    }\n  }\n\n  function handleSaveClick() {\n    if (tab.savedQueryId) {\n      void saveLinkedSavedQuery()\n      return\n    }\n    saveName = tab.name\n    saveDescription = ''\n    showSaveModal = true\n  }\n\n  async function saveLinkedSavedQuery() {\n    const sql = editorComponent?.getValue() ?? tab.sql\n    if (!sql.trim() || !tab.savedQueryId) return\n\n    saving = true\n    try {\n      await apiPut(`/api/saved-queries/${tab.savedQueryId}`, {\n        query: sql,\n      })\n      markQueryTabSaved(tab.id, { baseSql: sql })\n      toastSuccess('Saved query updated')\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      saving = false\n    }\n  }\n\n  async function handleSaveConfirm() {\n    const sql = editorComponent?.getValue() ?? 
tab.sql\n    if (!sql.trim() || !saveName.trim()) return\n\n    saving = true\n    try {\n      const created = await apiPost<SavedQuery>('/api/saved-queries', {\n        name: saveName.trim(),\n        description: saveDescription.trim(),\n        query: sql,\n      })\n      if (created?.id) {\n        markQueryTabSaved(tab.id, {\n          savedQueryId: created.id,\n          name: created.name,\n          baseSql: sql,\n        })\n      } else {\n        markQueryTabSaved(tab.id, { baseSql: sql })\n      }\n      toastSuccess('Query saved')\n      showSaveModal = false\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      saving = false\n    }\n  }\n\n  // ── Drag handle logic ──\n  function onDragStart(e: MouseEvent) {\n    e.preventDefault()\n    dragging = true\n    document.addEventListener('mousemove', onDragMove)\n    document.addEventListener('mouseup', onDragEnd)\n  }\n\n  function onDragMove(e: MouseEvent) {\n    if (!containerEl) return\n    const rect = containerEl.getBoundingClientRect()\n    const y = e.clientY - rect.top\n    const pct = (y / rect.height) * 100\n    splitPercent = Math.max(15, Math.min(85, pct))\n  }\n\n  function onDragEnd() {\n    dragging = false\n    document.removeEventListener('mousemove', onDragMove)\n    document.removeEventListener('mouseup', onDragEnd)\n    localStorage.setItem('ch-ui-split-percent', String(splitPercent))\n  }\n</script>\n\n<div class=\"flex flex-col h-full overflow-hidden\" bind:this={containerEl}>\n  <!-- Editor pane -->\n  <div class=\"flex flex-col min-h-[80px] overflow-hidden border-b border-gray-200 dark:border-gray-800\" style=\"height: {splitPercent}%\">\n    <Toolbar\n      running={result?.running ?? 
false}\n      onrun={() => handleRun()}\n      oncancel={handleCancel}\n      onformat={handleFormat}\n      onexplain={handleExplain}\n      onsave={handleSaveClick}\n      {estimate}\n      {estimateLoading}\n    />\n    <div class=\"flex-1 min-h-0\">\n      <SqlEditor\n        bind:this={editorComponent}\n        value={tab.sql}\n        onrun={handleRun}\n        onchange={handleSQLChange}\n      />\n    </div>\n  </div>\n\n  <!-- Drag handle -->\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div\n    class=\"h-1 shrink-0 cursor-row-resize group flex items-center justify-center hover:bg-ch-blue/20 transition-colors {dragging ? 'bg-ch-blue/30' : 'bg-gray-200 dark:bg-gray-800'}\"\n    onmousedown={onDragStart}\n  >\n    <div class=\"w-8 h-0.5 rounded-full {dragging ? 'bg-ch-blue' : 'bg-gray-600 group-hover:bg-ch-blue/60'} transition-colors\"></div>\n  </div>\n\n  <!-- Results pane -->\n  <div class=\"flex-1 min-h-[80px] overflow-hidden flex flex-col\">\n    <ResultPanel\n      meta={result?.meta ?? []}\n      data={result?.data ?? []}\n      loading={result?.running ?? false}\n      error={result?.error ?? null}\n      stats={result?.stats ?? null}\n      elapsedMs={result?.elapsedMs ?? 0}\n      running={result?.running ?? 
false}\n      {streamRows}\n      {streamChunks}\n      {streamStartedAt}\n      {streamLastChunkAt}\n      planNodes={planNodes}\n      planLines={planLines}\n      planSource={planSource}\n      planLoading={planLoading}\n      planError={planError}\n      onLoadPlan={handleLoadPlan}\n      onSample={handleSample}\n      {profile}\n      {profileAvailable}\n      {profileReason}\n      {profileLoading}\n      {profileError}\n      samplingMode={samplingMode}\n      {estimate}\n    />\n  </div>\n</div>\n\n<!-- Drag overlay to prevent iframe/editor stealing mouse events -->\n{#if dragging}\n  <div class=\"fixed inset-0 z-50 cursor-row-resize\"></div>\n{/if}\n\n<!-- Save query modal -->\n{#if showSaveModal}\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div\n    class=\"fixed inset-0 z-50 flex items-center justify-center bg-black/60\"\n    onclick={() => showSaveModal = false}\n    onkeydown={(e) => e.key === 'Escape' && (showSaveModal = false)}\n  >\n    <!-- svelte-ignore a11y_no_static_element_interactions -->\n    <div\n      class=\"bg-gray-50 dark:bg-gray-900 border border-gray-300 dark:border-gray-700 rounded-lg p-5 w-96 shadow-xl\"\n      onclick={(e) => e.stopPropagation()}\n      onkeydown={(e) => e.stopPropagation()}\n      tabindex=\"-1\"\n    >\n      <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 mb-3\">Save Query</h3>\n\n      <label class=\"block mb-2\">\n        <span class=\"text-xs text-gray-500 dark:text-gray-400\">Name</span>\n        <input\n          type=\"text\"\n          class=\"mt-1 w-full px-2.5 py-1.5 bg-gray-200 dark:bg-gray-800 border border-gray-300 dark:border-gray-700 rounded text-sm text-gray-800 dark:text-gray-200 focus:outline-none focus:border-ch-blue\"\n          bind:value={saveName}\n          onkeydown={(e) => e.key === 'Enter' && handleSaveConfirm()}\n        />\n      </label>\n\n      <label class=\"block mb-4\">\n        <span class=\"text-xs text-gray-500 
dark:text-gray-400\">Description (optional)</span>\n        <input\n          type=\"text\"\n          class=\"mt-1 w-full px-2.5 py-1.5 bg-gray-200 dark:bg-gray-800 border border-gray-300 dark:border-gray-700 rounded text-sm text-gray-800 dark:text-gray-200 focus:outline-none focus:border-ch-blue\"\n          bind:value={saveDescription}\n        />\n      </label>\n\n      <div class=\"flex justify-end gap-2\">\n        <button\n          class=\"px-3 py-1.5 text-xs text-gray-500 dark:text-gray-400 hover:text-gray-800 dark:hover:text-gray-200 rounded hover:bg-gray-200 dark:hover:bg-gray-800\"\n          onclick={() => showSaveModal = false}\n        >Cancel</button>\n        <button\n          class=\"px-3 py-1.5 text-xs bg-ch-blue text-white rounded hover:bg-ch-blue/80 disabled:opacity-50\"\n          onclick={handleSaveConfirm}\n          disabled={saving || !saveName.trim()}\n        >{saving ? 'Saving...' : 'Save'}</button>\n      </div>\n    </div>\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/layout/content/TableContent.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount, onDestroy } from 'svelte'\n  import type { TableTab } from '../../../stores/tabs.svelte'\n  import type { ColumnMeta } from '../../../types/query'\n  import { fetchTableInfo, fetchTableSchema } from '../../../api/query'\n  import { formatBytes, formatNumber } from '../../../utils/format'\n  import { copyToClipboard } from '../../../utils/export'\n  import { success } from '../../../stores/toast.svelte'\n  import { getTheme } from '../../../stores/theme.svelte'\n  import DataPreview from '../../explorer/DataPreview.svelte'\n  import VirtualTable from '../../table/VirtualTable.svelte'\n  import Spinner from '../../common/Spinner.svelte'\n  import { Table2, Database, HardDrive, Rows3, Clock, Key, LayoutGrid, RefreshCw, Copy, Check, ChevronDown, Braces, FileSpreadsheet, FileText, Code2 } from 'lucide-svelte'\n  import ContextMenu from '../../common/ContextMenu.svelte'\n  import type { ContextMenuItem } from '../../common/ContextMenu.svelte'\n  import { EditorView } from '@codemirror/view'\n  import { EditorState, Compartment } from '@codemirror/state'\n  import { HighlightStyle, syntaxHighlighting } from '@codemirror/language'\n  import { sql, StandardSQL } from '@codemirror/lang-sql'\n  import { tags as t } from '@lezer/highlight'\n\n  interface Props {\n    tab: TableTab\n  }\n\n  let { tab }: Props = $props()\n\n  type SubTab = 'overview' | 'schema' | 'data'\n  let activeSubTab = $state<SubTab>('overview')\n\n  // Overview data\n  let tableInfo = $state<Record<string, any>>({})\n  let infoLoading = $state(true)\n  let infoError = $state<string | null>(null)\n\n  // Schema data\n  let schemaMeta = $state<ColumnMeta[]>([])\n  let schemaData = $state<unknown[][]>([])\n  let schemaLoading = $state(false)\n  let schemaError = $state<string | null>(null)\n  let schemaLoaded = $state(false)\n\n  // Copy button state\n  let copied = $state(false)\n  let copyTimeout: ReturnType<typeof setTimeout> | undefined\n\n  // 
Schema copy menu state\n  let schemaCopyMenuOpen = $state(false)\n  let schemaCopyMenuX = $state(0)\n  let schemaCopyMenuY = $state(0)\n\n  // CodeMirror state\n  let cmContainer = $state<HTMLDivElement>(undefined!)\n  let cmView: EditorView | undefined\n  const themeCompartment = new Compartment()\n  let observer: MutationObserver | undefined\n\n  const lightTheme = EditorView.theme({\n    '&': { backgroundColor: '#ffffff' },\n    '.cm-gutters': { backgroundColor: '#f4f4f5', borderRight: '1px solid #d4d4d8' },\n    '.cm-activeLineGutter': { backgroundColor: '#ececef' },\n    '.cm-activeLine': { backgroundColor: 'transparent' },\n    '.cm-selectionBackground': { backgroundColor: '#ffedd5 !important' },\n    '&.cm-focused .cm-selectionBackground': { backgroundColor: '#fed7aa !important' },\n    '.cm-cursor': { borderLeftColor: '#1f2126' },\n    '.cm-matchingBracket': { backgroundColor: '#ffedd5', outline: '1px solid #fb923c' },\n  }, { dark: false })\n\n  const darkTheme = EditorView.theme({\n    '&': { backgroundColor: '#17181d', color: '#f3f4f6' },\n    '.cm-gutters': { backgroundColor: '#1d1f25', borderRight: '1px solid #3f434c', color: '#a5a8b2' },\n    '.cm-activeLine': { backgroundColor: 'rgba(249,115,22,0.1)' },\n    '.cm-activeLineGutter': { backgroundColor: 'rgba(249,115,22,0.16)' },\n    '.cm-selectionBackground': { backgroundColor: 'rgba(251,146,60,0.32) !important' },\n    '.cm-matchingBracket': { backgroundColor: 'rgba(249,115,22,0.2)', outline: '1px solid rgba(251,146,60,0.9)' },\n    '.cm-cursor': { borderLeftColor: '#f3f4f6' },\n  }, { dark: true })\n\n  const lightHighlight = HighlightStyle.define([\n    { tag: [t.keyword, t.operatorKeyword, t.controlKeyword, t.definitionKeyword, t.moduleKeyword], color: '#c2410c', fontWeight: '600' },\n    { tag: [t.function(t.variableName), t.function(t.propertyName)], color: '#ea580c' },\n    { tag: [t.variableName, t.definition(t.variableName), t.definition(t.name), t.special(t.variableName)], color: '#27272a' 
},\n    { tag: [t.propertyName], color: '#166534' },\n    { tag: [t.typeName, t.className], color: '#374151', fontWeight: '500' },\n    { tag: [t.string, t.special(t.string)], color: '#15803d' },\n    { tag: [t.number, t.integer, t.float, t.atom], color: '#b45309' },\n    { tag: [t.bool, t.null], color: '#a16207', fontWeight: '600' },\n    { tag: [t.comment], color: '#71717a', fontStyle: 'italic' },\n    { tag: [t.operator, t.punctuation, t.separator, t.bracket], color: '#52525b' },\n    { tag: t.invalid, color: '#b91c1c', textDecoration: 'underline wavy' },\n  ])\n\n  const darkHighlight = HighlightStyle.define([\n    { tag: [t.keyword, t.operatorKeyword, t.controlKeyword, t.definitionKeyword, t.moduleKeyword], color: '#fb923c', fontWeight: '600' },\n    { tag: [t.function(t.variableName), t.function(t.propertyName)], color: '#fdba74' },\n    { tag: [t.variableName, t.definition(t.variableName), t.definition(t.name), t.special(t.variableName)], color: '#f4f4f5' },\n    { tag: [t.propertyName], color: '#86efac' },\n    { tag: [t.typeName, t.className], color: '#d4d4d8', fontWeight: '500' },\n    { tag: [t.string, t.special(t.string)], color: '#4ade80' },\n    { tag: [t.number, t.integer, t.float, t.atom], color: '#fbbf24' },\n    { tag: [t.bool, t.null], color: '#f59e0b', fontWeight: '600' },\n    { tag: [t.comment], color: '#9ca3af', fontStyle: 'italic' },\n    { tag: [t.operator, t.punctuation, t.separator, t.bracket], color: '#d4d4d8' },\n    { tag: t.invalid, color: '#f87171', textDecoration: 'underline wavy' },\n  ])\n\n  function getThemeExtension() {\n    return getTheme() === 'dark'\n      ? 
[darkTheme, syntaxHighlighting(darkHighlight)]\n      : [lightTheme, syntaxHighlighting(lightHighlight)]\n  }\n\n  function formatCreateTableSQL(raw: string): string {\n    if (!raw) return raw\n    let s = raw.trim()\n    // Add newline before top-level keywords\n    s = s.replace(/\\s+(ENGINE\\s*=)/gi, '\\n$1')\n    s = s.replace(/\\s+(ORDER\\s+BY)/gi, '\\n$1')\n    s = s.replace(/\\s+(PARTITION\\s+BY)/gi, '\\n$1')\n    s = s.replace(/\\s+(PRIMARY\\s+KEY)/gi, '\\n$1')\n    s = s.replace(/\\s+(SETTINGS)/gi, '\\n$1')\n    s = s.replace(/\\s+(TTL)/gi, '\\n$1')\n    // Opening paren on same line, but newline after it\n    s = s.replace(/\\(\\s*`/g, '(\\n  `')\n    // Each column on its own line (comma followed by backtick)\n    s = s.replace(/,\\s*`/g, ',\\n  `')\n    // Closing paren on its own line\n    s = s.replace(/\\)\\s*(ENGINE)/gi, '\\n)\\n$1')\n    return s\n  }\n\n  function mountCodeMirror(sqlText: string) {\n    if (!cmContainer) return\n    cmView?.destroy()\n\n    const state = EditorState.create({\n      doc: formatCreateTableSQL(sqlText),\n      extensions: [\n        sql({ dialect: StandardSQL }),\n        EditorState.readOnly.of(true),\n        EditorView.editable.of(false),\n        themeCompartment.of(getThemeExtension()),\n        EditorView.theme({\n          '&': { fontSize: '12px', maxHeight: '360px' },\n          '.cm-scroller': { overflow: 'auto', fontFamily: 'var(--font-mono)' },\n          '.cm-content': { padding: '0.75rem 0.875rem 1rem' },\n          '.cm-gutters': { display: 'none' },\n          '&.cm-focused': { outline: 'none' },\n          '.cm-activeLine': { backgroundColor: 'transparent' },\n        }),\n        EditorView.lineWrapping,\n      ],\n    })\n\n    cmView = new EditorView({ state, parent: cmContainer })\n\n    observer?.disconnect()\n    observer = new MutationObserver(() => {\n      cmView?.dispatch({\n        effects: themeCompartment.reconfigure(getThemeExtension()),\n      })\n    })\n    
observer.observe(document.documentElement, { attributes: true, attributeFilter: ['class'] })\n  }\n\n  onMount(() => {\n    loadInfo()\n  })\n\n  onDestroy(() => {\n    cmView?.destroy()\n    observer?.disconnect()\n    if (copyTimeout) clearTimeout(copyTimeout)\n  })\n\n  async function loadInfo() {\n    infoLoading = true\n    infoError = null\n    try {\n      tableInfo = await fetchTableInfo(tab.database, tab.table)\n    } catch (e: any) {\n      infoError = e.message\n    } finally {\n      infoLoading = false\n    }\n  }\n\n  function handleRefresh() {\n    schemaLoaded = false\n    loadInfo()\n  }\n\n  async function handleCopy() {\n    if (!tableInfo.create_table_query) return\n    await copyToClipboard(tableInfo.create_table_query)\n    success('Copied to clipboard')\n    copied = true\n    if (copyTimeout) clearTimeout(copyTimeout)\n    copyTimeout = setTimeout(() => { copied = false }, 2000)\n  }\n\n  function openSchemaCopyMenu(e: MouseEvent) {\n    const rect = (e.currentTarget as HTMLElement).getBoundingClientRect()\n    schemaCopyMenuX = rect.left\n    schemaCopyMenuY = rect.bottom + 4\n    schemaCopyMenuOpen = true\n  }\n\n  function schemaRows(): Record<string, unknown>[] {\n    return schemaData.map((row) => {\n      const obj: Record<string, unknown> = {}\n      schemaMeta.forEach((col, i) => { obj[col.name] = (row as unknown[])[i] ?? '' })\n      return obj\n    })\n  }\n\n  async function copySchemaAsJSON() {\n    await copyToClipboard(JSON.stringify(schemaRows(), null, 2))\n    success('Schema copied as JSON')\n  }\n\n  async function copySchemaAsTSV() {\n    const header = schemaMeta.map((c) => c.name).join('\\t')\n    const rows = schemaData.map((row) => (row as unknown[]).map((v) => v ?? '').join('\\t'))\n    await copyToClipboard([header, ...rows].join('\\n'))\n    success('Schema copied as TSV')\n  }\n\n  async function copySchemaAsCSV() {\n    const escape = (v: unknown) => {\n      const s = String(v ?? 
'')\n      return s.includes(',') || s.includes('\"') || s.includes('\\n') ? `\"${s.replace(/\"/g, '\"\"')}\"` : s\n    }\n    const header = schemaMeta.map((c) => escape(c.name)).join(',')\n    const rows = schemaData.map((row) => (row as unknown[]).map((v) => escape(v)).join(','))\n    await copyToClipboard([header, ...rows].join('\\n'))\n    success('Schema copied as CSV')\n  }\n\n  async function copySchemaAsSQL() {\n    const cols = schemaRows()\n      .map((r) => `  ${r.name} ${r.type}`)\n      .join(',\\n')\n    const ddl = `CREATE TABLE ${tab.database}.${tab.table}\\n(\\n${cols}\\n)`\n    await copyToClipboard(ddl)\n    success('Schema copied as SQL')\n  }\n\n  const schemaCopyItems: ContextMenuItem[] = [\n    { id: 'json', label: 'Copy as JSON', icon: Braces, onSelect: copySchemaAsJSON },\n    { id: 'tsv', label: 'Copy as TSV', icon: FileSpreadsheet, onSelect: copySchemaAsTSV },\n    { id: 'csv', label: 'Copy as CSV', icon: FileText, onSelect: copySchemaAsCSV },\n    { id: 'sql', label: 'Copy as SQL', icon: Code2, onSelect: copySchemaAsSQL },\n  ]\n\n  async function loadSchema() {\n    if (schemaLoaded) return\n    schemaLoading = true\n    schemaError = null\n    try {\n      const res = await fetchTableSchema(tab.database, tab.table)\n      schemaMeta = res.meta ?? []\n      schemaData = (res.data ?? 
[]).map((row: any) => {\n        if (Array.isArray(row)) return row\n        return res.meta.map((col: any) => row[col.name])\n      })\n      schemaLoaded = true\n    } catch (e: any) {\n      schemaError = e.message\n    } finally {\n      schemaLoading = false\n    }\n  }\n\n  function switchTab(t: SubTab) {\n    activeSubTab = t\n    if (t === 'schema') loadSchema()\n  }\n\n  function formatDateTime(value: unknown): string {\n    if (!value) return '—'\n    const date = new Date(String(value))\n    if (Number.isNaN(date.getTime())) return String(value)\n    try {\n      return new Intl.DateTimeFormat(undefined, {\n        year: 'numeric',\n        month: '2-digit',\n        day: '2-digit',\n        hour: '2-digit',\n        minute: '2-digit',\n        second: '2-digit',\n      }).format(date)\n    } catch {\n      return date.toLocaleString()\n    }\n  }\n\n  const subTabs: { id: SubTab; label: string }[] = [\n    { id: 'overview', label: 'Overview' },\n    { id: 'schema', label: 'Schema' },\n    { id: 'data', label: 'Data Sample' },\n  ]\n\n  // Metric cards\n  const metrics = $derived.by(() => {\n    if (!tableInfo || Object.keys(tableInfo).length === 0) return []\n    return [\n      { label: 'Rows', value: formatNumber(Number(tableInfo.total_rows ?? 0)), icon: Rows3, color: 'text-ch-orange' },\n      { label: 'Size', value: formatBytes(Number(tableInfo.total_bytes ?? 0)), icon: HardDrive, color: 'text-ch-green' },\n      { label: 'Engine', value: tableInfo.engine ?? 
'—', icon: Database, color: 'text-ch-orange' },\n      { label: 'Last Modified', value: formatDateTime(tableInfo.metadata_modification_time), icon: Clock, color: 'text-gray-500' },\n    ]\n  })\n\n  // Whether any keys exist\n  const hasKeys = $derived(\n    !!(tableInfo.partition_key || tableInfo.sorting_key || tableInfo.primary_key || tableInfo.sampling_key)\n  )\n\n  // Mount CodeMirror when create_table_query becomes available and tab is overview\n  $effect(() => {\n    if (activeSubTab === 'overview' && tableInfo.create_table_query && cmContainer) {\n      mountCodeMirror(tableInfo.create_table_query)\n    }\n  })\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <!-- Header -->\n  <div class=\"flex items-center gap-3 px-4 py-3 border-b border-gray-200 dark:border-gray-800 bg-gray-100/40 dark:bg-gray-900/45 shrink-0\">\n    <Table2 size={16} class=\"text-ch-orange shrink-0\" />\n    <div class=\"min-w-0\">\n      <span class=\"text-sm text-gray-500\">{tab.database}.</span>\n      <span class=\"text-sm font-medium text-gray-800 dark:text-gray-200 truncate\">{tab.table}</span>\n    </div>\n    <button\n      class=\"ml-1 p-1 rounded hover:bg-gray-200 dark:hover:bg-gray-800 transition-colors text-gray-400 hover:text-gray-600 dark:hover:text-gray-300\"\n      onclick={handleRefresh}\n      title=\"Refresh table info\"\n    >\n      <RefreshCw size={13} class={infoLoading ? 'animate-spin' : ''} />\n    </button>\n  </div>\n\n  <!-- Sub-tab bar -->\n  <div class=\"flex items-center gap-1 px-3 py-1.5 border-b border-gray-200 dark:border-gray-800 bg-white dark:bg-gray-950 shrink-0\">\n    {#each subTabs as st}\n      <button\n        class=\"px-3 py-1.5 text-xs rounded-md transition-colors whitespace-nowrap {activeSubTab === st.id\n          ? 
'bg-gray-200 dark:bg-gray-800 text-gray-800 dark:text-gray-200 font-medium'\n          : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-200/50 dark:hover:bg-gray-800/50'}\"\n        onclick={() => switchTab(st.id)}\n      >{st.label}</button>\n    {/each}\n  </div>\n\n  <!-- Sub-tab content -->\n  <div class=\"flex-1 min-h-0 overflow-auto\">\n    {#if activeSubTab === 'overview'}\n      {#if infoLoading}\n        <div class=\"flex items-center justify-center py-12 gap-2\">\n          <Spinner size=\"sm\" />\n          <span class=\"text-sm text-gray-500\">Loading table info...</span>\n        </div>\n      {:else if infoError}\n        <div class=\"p-4\">\n          <div class=\"bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3 text-sm text-red-700 dark:text-red-300\">{infoError}</div>\n        </div>\n      {:else}\n        <!-- Metric cards -->\n        <div class=\"grid grid-cols-2 xl:grid-cols-4 gap-3 p-4\">\n          {#each metrics as m}\n            {@const Icon = m.icon}\n            <div class=\"bg-gray-50 dark:bg-gray-900 border border-gray-200 dark:border-gray-800 rounded-xl p-4\">\n              <div class=\"flex items-center gap-2 mb-2\">\n                <Icon size={14} class={m.color} />\n                <span class=\"text-xs text-gray-500 uppercase tracking-wider\">{m.label}</span>\n              </div>\n              <div class=\"text-xl font-semibold text-gray-800 dark:text-gray-200 truncate\">{m.value}</div>\n            </div>\n          {/each}\n        </div>\n\n        <!-- Detail rows (keys) — only if any keys exist -->\n        {#if hasKeys}\n          <div class=\"px-4 pb-4\">\n            <div class=\"bg-gray-50 dark:bg-gray-900 border border-gray-200 dark:border-gray-800 rounded-xl p-4\">\n              <h3 class=\"text-xs text-gray-500 uppercase tracking-wider mb-3\">Storage Keys</h3>\n              <div class=\"grid grid-cols-[140px_1fr] gap-y-2 gap-x-3 
text-sm\">\n              {#if tableInfo.partition_key}\n                <span class=\"text-gray-500 inline-flex items-center gap-2\"><LayoutGrid size={13} class=\"shrink-0\" />Partition Key</span>\n                <code class=\"text-xs text-gray-700 dark:text-gray-300 font-mono truncate\">{tableInfo.partition_key}</code>\n              {/if}\n              {#if tableInfo.sorting_key}\n                <span class=\"text-gray-500 inline-flex items-center gap-2\"><Key size={13} class=\"shrink-0\" />Sorting Key</span>\n                <code class=\"text-xs text-gray-700 dark:text-gray-300 font-mono truncate\">{tableInfo.sorting_key}</code>\n              {/if}\n              {#if tableInfo.primary_key}\n                <span class=\"text-gray-500 inline-flex items-center gap-2\"><Key size={13} class=\"text-ch-orange shrink-0\" />Primary Key</span>\n                <code class=\"text-xs text-gray-700 dark:text-gray-300 font-mono truncate\">{tableInfo.primary_key}</code>\n              {/if}\n              {#if tableInfo.sampling_key}\n                <span class=\"text-gray-500 inline-flex items-center gap-2\"><Key size={13} class=\"shrink-0\" />Sampling Key</span>\n                <code class=\"text-xs text-gray-700 dark:text-gray-300 font-mono truncate\">{tableInfo.sampling_key}</code>\n              {/if}\n              </div>\n            </div>\n          </div>\n        {/if}\n\n        <!-- CREATE TABLE statement -->\n        {#if tableInfo.create_table_query}\n          <div class=\"px-4 pb-5\">\n            <div class=\"mb-2.5 flex items-center justify-between\">\n              <h4 class=\"text-xs text-gray-500 uppercase tracking-wider\">Create Table SQL</h4>\n              <span class=\"text-[11px] text-gray-400\">Syntax highlighted</span>\n            </div>\n            <div class=\"relative bg-gray-50/80 dark:bg-gray-900/80 border border-gray-200 dark:border-gray-800 rounded-xl p-2\">\n              <button\n                class=\"absolute top-2 right-2 
z-10 p-1.5 rounded-md bg-gray-200/80 dark:bg-gray-800/80 hover:bg-gray-300 dark:hover:bg-gray-700 transition-colors text-gray-500 hover:text-gray-700 dark:hover:text-gray-300\"\n                onclick={handleCopy}\n                title=\"Copy CREATE TABLE\"\n              >\n                {#if copied}\n                  <Check size={13} class=\"text-green-500\" />\n                {:else}\n                  <Copy size={13} />\n                {/if}\n              </button>\n              <div class=\"rounded-lg overflow-hidden border border-gray-200 dark:border-gray-800\" bind:this={cmContainer}></div>\n            </div>\n          </div>\n        {/if}\n      {/if}\n\n    {:else if activeSubTab === 'schema'}\n      {#if schemaLoading}\n        <div class=\"flex items-center justify-center py-12 gap-2\">\n          <Spinner size=\"sm\" />\n          <span class=\"text-sm text-gray-500\">Loading schema...</span>\n        </div>\n      {:else if schemaError}\n        <div class=\"p-4\">\n          <div class=\"bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3 text-sm text-red-700 dark:text-red-300\">{schemaError}</div>\n        </div>\n      {:else if schemaMeta.length > 0}\n        <div class=\"flex items-center justify-end px-3 py-1.5 border-b border-gray-200 dark:border-gray-800\">\n          <button\n            class=\"inline-flex items-center gap-1.5 px-2.5 py-1 text-xs rounded-md transition-colors\n              bg-gray-100 dark:bg-gray-800 text-gray-600 dark:text-gray-300 hover:bg-gray-200 dark:hover:bg-gray-700\"\n            onclick={openSchemaCopyMenu}\n            title=\"Copy schema to clipboard\"\n          >\n            <Copy size={12} />\n            Copy Schema\n            <ChevronDown size={11} class=\"text-gray-400 transition-transform {schemaCopyMenuOpen ? 
'rotate-180' : ''}\" />\n          </button>\n        </div>\n        <ContextMenu\n          open={schemaCopyMenuOpen}\n          x={schemaCopyMenuX}\n          y={schemaCopyMenuY}\n          items={schemaCopyItems}\n          onclose={() => schemaCopyMenuOpen = false}\n        />\n        <VirtualTable meta={schemaMeta} data={schemaData} />\n      {:else}\n        <div class=\"flex items-center justify-center py-12 text-gray-400 dark:text-gray-600 text-sm\">No schema data</div>\n      {/if}\n\n    {:else if activeSubTab === 'data'}\n      <DataPreview database={tab.database} table={tab.table} />\n    {/if}\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/models/ModelNode.svelte",
    "content": "<script lang=\"ts\">\n  import { Handle, Position } from '@xyflow/svelte'\n  import { Eye, Table2 } from 'lucide-svelte'\n  import type { Materialization, ModelStatus } from '../../types/models'\n\n  interface Props {\n    data: {\n      name: string\n      materialization: Materialization\n      status: ModelStatus\n      target_database: string\n    }\n  }\n\n  let { data }: Props = $props()\n\n  const statusColor = $derived(\n    data.status === 'success'\n      ? 'border-green-400 dark:border-green-600'\n      : data.status === 'error'\n        ? 'border-red-400 dark:border-red-600'\n        : 'border-gray-300 dark:border-gray-600'\n  )\n\n  const statusBg = $derived(\n    data.status === 'success'\n      ? 'bg-green-50 dark:bg-green-900/20'\n      : data.status === 'error'\n        ? 'bg-red-50 dark:bg-red-900/20'\n        : 'bg-gray-50 dark:bg-gray-800/40'\n  )\n\n  const statusDot = $derived(\n    data.status === 'success'\n      ? 'bg-green-500'\n      : data.status === 'error'\n        ? 'bg-red-500'\n        : 'bg-gray-400'\n  )\n\n  const Icon = $derived(data.materialization === 'table' ? 
Table2 : Eye)\n</script>\n\n<div class=\"rounded-lg border-2 {statusColor} {statusBg} shadow-sm min-w-[180px] cursor-pointer hover:shadow-md transition-shadow\">\n  <div class=\"flex items-center gap-2 px-3 py-2\">\n    <Icon size={14} class=\"text-gray-600 dark:text-gray-300 shrink-0\" />\n    <div class=\"min-w-0 flex-1\">\n      <div class=\"text-xs font-medium text-gray-800 dark:text-gray-200 truncate\">{data.name}</div>\n      <div class=\"text-[10px] text-gray-500 dark:text-gray-400 flex items-center gap-1\">\n        <span>{data.materialization}</span>\n        <span class=\"opacity-50\">|</span>\n        <span>{data.target_database}</span>\n      </div>\n    </div>\n    <span class=\"w-2 h-2 rounded-full {statusDot} shrink-0\"></span>\n  </div>\n  <Handle type=\"target\" position={Position.Left} class=\"!bg-orange-500 !w-3 !h-3 !border-2 !border-white dark:!border-gray-900\" />\n  <Handle type=\"source\" position={Position.Right} class=\"!bg-orange-500 !w-3 !h-3 !border-2 !border-white dark:!border-gray-900\" />\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/NodeConfigPanel.svelte",
    "content": "<script lang=\"ts\">\n  import { CONNECTOR_FIELDS, type NodeType, type ConnectorFieldDef } from '../../types/pipelines'\n  import { X, HelpCircle, Copy, Check, Shield } from 'lucide-svelte'\n\n  interface Props {\n    nodeId: string\n    nodeType: NodeType\n    label: string\n    config: Record<string, unknown>\n    pipelineId: string\n    onUpdate: (nodeId: string, config: Record<string, unknown>, label: string) => void\n    onClose: () => void\n  }\n\n  let { nodeId, nodeType, label, config, pipelineId, onUpdate, onClose }: Props = $props()\n\n  let copied = $state(false)\n  let copiedToken = $state(false)\n\n  let localLabel = $state(label)  // eslint-disable-line -- initial value from prop is intentional\n  let localConfig = $state<Record<string, unknown>>({ ...config })  // eslint-disable-line -- initial value from prop is intentional\n\n  const fields = $derived(CONNECTOR_FIELDS[nodeType] || [])\n\n  // Initialize defaults for fields that don't have a value\n  $effect(() => {\n    let changed = false\n    for (const field of fields) {\n      if (localConfig[field.key] === undefined && field.default !== undefined) {\n        localConfig[field.key] = field.default\n        changed = true\n      }\n    }\n    if (changed) {\n      localConfig = { ...localConfig }\n    }\n  })\n\n  function handleChange(key: string, value: unknown) {\n    localConfig = { ...localConfig, [key]: value }\n    onUpdate(nodeId, localConfig, localLabel)\n  }\n\n  function handleLabelChange(value: string) {\n    localLabel = value\n    onUpdate(nodeId, localConfig, localLabel)\n  }\n\n  function getFieldValue(field: ConnectorFieldDef): unknown {\n    return localConfig[field.key] ?? field.default ?? 
''\n  }\n\n  function generateToken(): string {\n    const bytes = new Uint8Array(24)\n    crypto.getRandomValues(bytes)\n    return Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join('')\n  }\n\n  function handleAuthToggle(enabled: boolean) {\n    if (enabled) {\n      const token = generateToken()\n      localConfig = { ...localConfig, auth_enabled: true, auth_token: token }\n    } else {\n      localConfig = { ...localConfig, auth_enabled: false, auth_token: '' }\n    }\n    onUpdate(nodeId, localConfig, localLabel)\n  }\n</script>\n\n<div class=\"flex flex-col h-full border-l border-gray-200 dark:border-gray-800 bg-white dark:bg-gray-950 w-72\">\n  <!-- Header -->\n  <div class=\"flex items-center justify-between px-3 py-2 border-b border-gray-200 dark:border-gray-800\">\n    <h3 class=\"text-xs font-semibold text-gray-700 dark:text-gray-300 uppercase tracking-wider\">\n      Node Config\n    </h3>\n    <button\n      class=\"p-1 rounded hover:bg-gray-100 dark:hover:bg-gray-800 text-gray-400\"\n      onclick={onClose}\n    >\n      <X size={14} />\n    </button>\n  </div>\n\n  <!-- Form -->\n  <div class=\"flex-1 overflow-auto p-3 space-y-3\">\n    <!-- Label -->\n    <div>\n      <label for=\"node-label-input\" class=\"block text-[10px] font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider mb-1\">\n        Label\n      </label>\n      <input\n        id=\"node-label-input\"\n        type=\"text\"\n        value={localLabel}\n        oninput={(e) => handleLabelChange((e.target as HTMLInputElement).value)}\n        class=\"w-full rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-2.5 py-1.5 text-xs text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500\"\n      />\n    </div>\n\n    <div class=\"border-t border-gray-200 dark:border-gray-800 pt-3\">\n      <p class=\"text-[10px] font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider mb-2\">\n        
{nodeType.replace('source_', '').replace('sink_', '')} Settings\n      </p>\n    </div>\n\n    {#each fields as field (field.key)}\n      <div>\n        <label class=\"flex items-center gap-1 text-[10px] font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider mb-1\">\n          {field.label}\n          {#if field.required}\n            <span class=\"text-red-400\">*</span>\n          {/if}\n          {#if field.help}\n            <span class=\"relative group\">\n              <HelpCircle size={10} class=\"text-gray-400 cursor-help\" />\n              <span class=\"absolute bottom-full left-1/2 -translate-x-1/2 mb-1 w-48 p-1.5 rounded bg-gray-900 dark:bg-gray-100 text-[9px] text-white dark:text-gray-900 hidden group-hover:block z-50 shadow-lg\">\n                {field.help}\n              </span>\n            </span>\n          {/if}\n        </label>\n\n        {#if field.type === 'text' || field.type === 'password'}\n          <input\n            type={field.type}\n            value={String(getFieldValue(field))}\n            placeholder={field.placeholder}\n            oninput={(e) => handleChange(field.key, (e.target as HTMLInputElement).value)}\n            class=\"w-full rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-2.5 py-1.5 text-xs text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500\"\n          />\n        {:else if field.type === 'number'}\n          <input\n            type=\"number\"\n            value={Number(getFieldValue(field)) || 0}\n            oninput={(e) => handleChange(field.key, Number((e.target as HTMLInputElement).value))}\n            class=\"w-full rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-2.5 py-1.5 text-xs text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500\"\n          />\n        {:else if field.type === 'select'}\n          <select\n            
value={String(getFieldValue(field))}\n            onchange={(e) => handleChange(field.key, (e.target as HTMLSelectElement).value)}\n            class=\"w-full rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-2.5 py-1.5 text-xs text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500\"\n          >\n            {#each field.options || [] as opt}\n              <option value={opt.value}>{opt.label}</option>\n            {/each}\n          </select>\n        {:else if field.type === 'textarea'}\n          <textarea\n            value={String(getFieldValue(field))}\n            placeholder={field.placeholder}\n            oninput={(e) => handleChange(field.key, (e.target as HTMLTextAreaElement).value)}\n            rows={3}\n            class=\"w-full rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-2.5 py-1.5 text-xs text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500 resize-none font-mono\"\n          ></textarea>\n        {:else if field.type === 'toggle'}\n          <button\n            aria-label={field.label}\n            class=\"relative inline-flex h-5 w-9 items-center rounded-full transition-colors {\n              getFieldValue(field) ? 'bg-orange-500' : 'bg-gray-300 dark:bg-gray-700'\n            }\"\n            onclick={() => {\n              if (field.key === 'auth_enabled') {\n                handleAuthToggle(!getFieldValue(field))\n              } else {\n                handleChange(field.key, !getFieldValue(field))\n              }\n            }}\n          >\n            <span\n              class=\"inline-block h-3.5 w-3.5 transform rounded-full bg-white transition-transform shadow-sm {\n                getFieldValue(field) ? 
'translate-x-4' : 'translate-x-0.5'\n              }\"\n            ></span>\n          </button>\n          {#if field.key === 'auth_enabled' && getFieldValue(field) && localConfig['auth_token']}\n            <div class=\"mt-2\">\n              <label class=\"flex items-center gap-1 text-[10px] font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider mb-1\">\n                <Shield size={10} class=\"text-orange-500\" />\n                Bearer Token\n              </label>\n              <div class=\"flex items-center gap-1\">\n                <input\n                  type=\"text\"\n                  value={String(localConfig['auth_token'])}\n                  readonly\n                  class=\"flex-1 rounded-lg border border-gray-300 dark:border-gray-700 bg-gray-50 dark:bg-gray-800/50 px-2.5 py-1.5 text-xs text-gray-700 dark:text-gray-300 font-mono select-all cursor-text\"\n                />\n                <button\n                  class=\"p-1.5 rounded-lg border border-gray-300 dark:border-gray-700 hover:bg-gray-100 dark:hover:bg-gray-800 text-gray-500 shrink-0\"\n                  title=\"Copy token\"\n                  onclick={() => {\n                    navigator.clipboard.writeText(String(localConfig['auth_token']))\n                    copiedToken = true\n                    setTimeout(() => { copiedToken = false }, 2000)\n                  }}\n                >\n                  {#if copiedToken}\n                    <Check size={12} class=\"text-green-500\" />\n                  {:else}\n                    <Copy size={12} />\n                  {/if}\n                </button>\n              </div>\n              <p class=\"text-[9px] text-gray-400 mt-1\">\n                Use header: Authorization: Bearer {String(localConfig['auth_token']).slice(0, 8)}...\n              </p>\n            </div>\n          {/if}\n        {:else if field.type === 'info'}\n          {@const webhookUrl = `${typeof window !== 'undefined' ? 
window.location.origin : ''}/api/pipelines/webhook/${pipelineId}`}\n          <div class=\"flex items-center gap-1\">\n            <input\n              type=\"text\"\n              value={webhookUrl}\n              readonly\n              class=\"flex-1 rounded-lg border border-gray-300 dark:border-gray-700 bg-gray-50 dark:bg-gray-800/50 px-2.5 py-1.5 text-xs text-gray-700 dark:text-gray-300 font-mono select-all cursor-text\"\n            />\n            <button\n              class=\"p-1.5 rounded-lg border border-gray-300 dark:border-gray-700 hover:bg-gray-100 dark:hover:bg-gray-800 text-gray-500 shrink-0\"\n              title=\"Copy URL\"\n              onclick={() => {\n                navigator.clipboard.writeText(webhookUrl)\n                copied = true\n                setTimeout(() => { copied = false }, 2000)\n              }}\n            >\n              {#if copied}\n                <Check size={12} class=\"text-green-500\" />\n              {:else}\n                <Copy size={12} />\n              {/if}\n            </button>\n          </div>\n        {/if}\n      </div>\n    {/each}\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/PipelineCanvas.svelte",
    "content": "<script lang=\"ts\">\n  import {\n    SvelteFlow,\n    Controls,\n    Background,\n    MiniMap,\n    type Node,\n    type Edge,\n    type NodeTypes,\n    type Connection,\n  } from '@xyflow/svelte'\n  import '@xyflow/svelte/dist/style.css'\n  import SourceNode from './nodes/SourceNode.svelte'\n  import SinkNode from './nodes/SinkNode.svelte'\n  import { SOURCE_NODE_TYPES, SINK_NODE_TYPES, type NodeType } from '../../types/pipelines'\n  import { Radio, Webhook, Database, HardDrive } from 'lucide-svelte'\n  import { getTheme } from '../../stores/theme.svelte'\n\n  interface Props {\n    nodes: Node[]\n    edges: Edge[]\n    onNodesChange: (nodes: Node[]) => void\n    onEdgesChange: (edges: Edge[]) => void\n    onConnect: (connection: Connection) => void\n    onNodeClick: (nodeId: string) => void\n    onPaneClick: () => void\n  }\n\n  let { nodes, edges, onNodesChange, onEdgesChange, onConnect, onNodeClick, onPaneClick }: Props = $props()\n\n  const nodeTypes: NodeTypes = {\n    source_kafka: SourceNode as any,\n    source_webhook: SourceNode as any,\n    source_database: SourceNode as any,\n    source_s3: SourceNode as any,\n    sink_clickhouse: SinkNode as any,\n  }\n\n  function onDragStart(e: DragEvent, type: NodeType) {\n    if (e.dataTransfer) {\n      e.dataTransfer.setData('application/pipeline-node', type)\n      e.dataTransfer.effectAllowed = 'move'\n    }\n  }\n\n  function onDragOver(e: DragEvent) {\n    e.preventDefault()\n    if (e.dataTransfer) {\n      e.dataTransfer.dropEffect = 'move'\n    }\n  }\n\n  function onDrop(e: DragEvent) {\n    e.preventDefault()\n    const type = e.dataTransfer?.getData('application/pipeline-node') as NodeType\n    if (!type) return\n\n    // Find the SvelteFlow container and its viewport element\n    const wrapper = e.currentTarget as HTMLElement\n    const flowEl = wrapper.querySelector('.svelte-flow') as HTMLElement\n    if (!flowEl) return\n\n    const viewport = 
flowEl.querySelector('.svelte-flow__viewport') as HTMLElement\n    if (!viewport) return\n\n    // Parse viewport transform: translate(tx, ty) scale(zoom)\n    const transform = viewport.style.transform\n    const match = transform.match(/translate\\(([^,]+),\\s*([^)]+)\\)\\s*scale\\(([^)]+)\\)/)\n    const tx = match ? parseFloat(match[1]) : 0\n    const ty = match ? parseFloat(match[2]) : 0\n    const zoom = match ? parseFloat(match[3]) : 1\n\n    // Convert screen coordinates to flow coordinates\n    const rect = flowEl.getBoundingClientRect()\n    const position = {\n      x: (e.clientX - rect.left - tx) / zoom,\n      y: (e.clientY - rect.top - ty) / zoom,\n    }\n\n    const allTypes = [...SOURCE_NODE_TYPES, ...SINK_NODE_TYPES]\n    const meta = allTypes.find((t) => t.type === type)\n    const label = meta?.label || type\n\n    const newNode: Node = {\n      id: `node-${Date.now()}`,\n      type,\n      position,\n      data: {\n        label,\n        node_type: type,\n        config: {},\n      },\n    }\n\n    onNodesChange([...nodes, newNode])\n  }\n\n  const sourceIcons: Record<string, typeof Radio> = {\n    source_kafka: Radio,\n    source_webhook: Webhook,\n    source_database: Database,\n    source_s3: HardDrive,\n  }\n</script>\n\n<div class=\"flex flex-1 min-h-0\">\n  <!-- Node Palette -->\n  <div class=\"w-44 border-r border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-950 p-2 overflow-auto\">\n    <p class=\"text-[10px] font-semibold text-gray-500 dark:text-gray-400 uppercase tracking-wider mb-2 px-1\">\n      Sources\n    </p>\n    {#each SOURCE_NODE_TYPES as source}\n      {@const Icon = sourceIcons[source.type] || Radio}\n      <div\n        class=\"flex items-center gap-2 px-2 py-1.5 mb-1 rounded-lg border border-gray-200 dark:border-gray-700 bg-white dark:bg-gray-900 cursor-grab hover:border-orange-300 dark:hover:border-orange-700 transition-colors text-xs\"\n        draggable=\"true\"\n        ondragstart={(e: DragEvent) => 
onDragStart(e, source.type)}\n        role=\"button\"\n        tabindex={0}\n      >\n        <Icon size={14} class=\"text-gray-500 shrink-0\" />\n        <div class=\"min-w-0\">\n          <div class=\"font-medium text-gray-700 dark:text-gray-300 text-xs\">{source.label}</div>\n          <div class=\"text-[9px] text-gray-400 dark:text-gray-500 truncate\">{source.description}</div>\n        </div>\n      </div>\n    {/each}\n\n    <p class=\"text-[10px] font-semibold text-gray-500 dark:text-gray-400 uppercase tracking-wider mt-3 mb-2 px-1\">\n      Sinks\n    </p>\n    {#each SINK_NODE_TYPES as sink}\n      <div\n        class=\"flex items-center gap-2 px-2 py-1.5 mb-1 rounded-lg border border-orange-200 dark:border-orange-800 bg-orange-50 dark:bg-orange-900/20 cursor-grab hover:border-orange-400 dark:hover:border-orange-600 transition-colors text-xs\"\n        draggable=\"true\"\n        ondragstart={(e: DragEvent) => onDragStart(e, sink.type)}\n        role=\"button\"\n        tabindex={0}\n      >\n        <Database size={14} class=\"text-orange-500 shrink-0\" />\n        <div class=\"min-w-0\">\n          <div class=\"font-medium text-gray-700 dark:text-gray-300 text-xs\">{sink.label}</div>\n          <div class=\"text-[9px] text-gray-400 dark:text-gray-500 truncate\">{sink.description}</div>\n        </div>\n      </div>\n    {/each}\n  </div>\n\n  <!-- Flow Canvas -->\n  <div\n    class=\"flex-1 min-h-0\"\n    ondragover={onDragOver}\n    ondrop={onDrop}\n    role=\"presentation\"\n  >\n    <SvelteFlow\n      {nodes}\n      {edges}\n      {nodeTypes}\n      fitView\n      colorMode={getTheme()}\n      proOptions={{ hideAttribution: true }}\n      onconnect={onConnect}\n      onnodeclick={({ node }) => {\n        if (node?.id) onNodeClick(node.id)\n      }}\n      onpaneclick={() => onPaneClick()}\n      onnodedragstop={({ nodes: draggedNodes }) => {\n        // Update node positions after drag\n        let updated = [...nodes]\n        for (const dn of 
draggedNodes) {\n          const idx = updated.findIndex((n) => n.id === dn.id)\n          if (idx >= 0) {\n            updated[idx] = { ...updated[idx], position: { ...dn.position } }\n          }\n        }\n        onNodesChange(updated)\n      }}\n      ondelete={({ nodes: deletedNodes, edges: deletedEdges }) => {\n        if (deletedNodes.length > 0) {\n          const deleteIds = new Set(deletedNodes.map((n) => n.id))\n          onNodesChange(nodes.filter((n) => !deleteIds.has(n.id)))\n        }\n        if (deletedEdges.length > 0) {\n          const deleteIds = new Set(deletedEdges.map((e) => e.id))\n          onEdgesChange(edges.filter((e) => !deleteIds.has(e.id)))\n        }\n      }}\n      defaultEdgeOptions={{ animated: true, style: 'stroke: #f97316; stroke-width: 2;' }}\n    >\n      <Controls />\n      <Background gap={16} />\n      <MiniMap />\n    </SvelteFlow>\n  </div>\n</div>\n\n<style>\n  /* Canvas background */\n  :global(.svelte-flow) {\n    background-color: #fafafa;\n  }\n  :global(.svelte-flow.dark) {\n    background-color: #0a0a0a;\n  }\n\n  /* Edge styling */\n  :global(.svelte-flow .svelte-flow__edge-path) {\n    stroke: #f97316;\n    stroke-width: 2;\n  }\n\n  /* Handle sizing */\n  :global(.svelte-flow .svelte-flow__handle) {\n    width: 10px;\n    height: 10px;\n  }\n\n  /* Controls — dark mode */\n  :global(.svelte-flow.dark .svelte-flow__controls) {\n    background: #1a1a1a;\n    border: 1px solid #333;\n    border-radius: 8px;\n    box-shadow: 0 2px 8px rgba(0, 0, 0, 0.4);\n  }\n  :global(.svelte-flow.dark .svelte-flow__controls-button) {\n    background: #1a1a1a;\n    border-color: #333;\n    fill: #a3a3a3;\n    color: #a3a3a3;\n  }\n  :global(.svelte-flow.dark .svelte-flow__controls-button:hover) {\n    background: #2a2a2a;\n    fill: #f97316;\n    color: #f97316;\n  }\n\n  /* Controls — light mode */\n  :global(.svelte-flow .svelte-flow__controls) {\n    border-radius: 8px;\n    box-shadow: 0 1px 4px rgba(0, 0, 0, 0.1);\n  
}\n\n  /* MiniMap — dark mode */\n  :global(.svelte-flow.dark .svelte-flow__minimap) {\n    background: #141414;\n    border: 1px solid #333;\n    border-radius: 8px;\n  }\n\n  /* MiniMap — light mode */\n  :global(.svelte-flow .svelte-flow__minimap) {\n    border-radius: 8px;\n    box-shadow: 0 1px 4px rgba(0, 0, 0, 0.1);\n  }\n\n  /* Background dots */\n  :global(.svelte-flow .svelte-flow__background) {\n    --xy-background-pattern-dots-color-default: #e5e5e5;\n  }\n  :global(.svelte-flow.dark .svelte-flow__background) {\n    --xy-background-pattern-dots-color-default: #7e7d7d;\n  }\n</style>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/PipelineEditor.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { Node, Edge, Connection } from '@xyflow/svelte'\n  import type { Pipeline, PipelineStatus, NodeType } from '../../types/pipelines'\n  import * as api from '../../api/pipelines'\n  import { success as toastSuccess, error as toastError } from '../../stores/toast.svelte'\n  import PipelineToolbar from './PipelineToolbar.svelte'\n  import PipelineCanvas from './PipelineCanvas.svelte'\n  import PipelineStatusBar from './PipelineStatusBar.svelte'\n  import NodeConfigPanel from './NodeConfigPanel.svelte'\n  import Spinner from '../common/Spinner.svelte'\n\n  interface Props {\n    pipelineId: string\n    onBack: () => void\n  }\n\n  let { pipelineId, onBack }: Props = $props()\n\n  let pipeline = $state<Pipeline | null>(null)\n  let loading = $state(true)\n  let saving = $state(false)\n\n  let nodes = $state<Node[]>([])\n  let edges = $state<Edge[]>([])\n\n  let selectedNodeId = $state<string | null>(null)\n\n  const selectedNode = $derived(selectedNodeId ? nodes.find((n) => n.id === selectedNodeId) : null)\n\n  onMount(async () => {\n    await loadPipeline()\n  })\n\n  async function loadPipeline() {\n    loading = true\n    try {\n      const res = await api.getPipeline(pipelineId)\n      pipeline = res.pipeline\n\n      // Convert backend graph nodes to Svelte Flow nodes\n      nodes = (res.graph.nodes || []).map((n) => ({\n        id: n.id,\n        type: n.node_type as string,\n        position: { x: n.position_x, y: n.position_y },\n        data: {\n          label: n.label,\n          node_type: n.node_type,\n          config: parseConfig(n.config_encrypted),\n        },\n      }))\n\n      // Convert backend edges to Svelte Flow edges\n      edges = (res.graph.edges || []).map((e) => ({\n        id: e.id,\n        source: e.source_node_id,\n        target: e.target_node_id,\n        sourceHandle: e.source_handle ?? undefined,\n        targetHandle: e.target_handle ?? 
undefined,\n        animated: true,\n      }))\n    } catch (e: any) {\n      toastError(e.message || 'Failed to load pipeline')\n    } finally {\n      loading = false\n    }\n  }\n\n  function parseConfig(encrypted: string): Record<string, unknown> {\n    try {\n      return JSON.parse(encrypted)\n    } catch {\n      return {}\n    }\n  }\n\n  async function handleSave() {\n    saving = true\n    try {\n      const graphNodes = nodes.map((n) => ({\n        id: n.id,\n        node_type: n.data.node_type as string,\n        label: n.data.label as string,\n        position_x: n.position.x,\n        position_y: n.position.y,\n        config: (n.data.config || {}) as Record<string, unknown>,\n      }))\n\n      const graphEdges = edges.map((e) => ({\n        id: e.id,\n        source_node_id: e.source,\n        target_node_id: e.target,\n        source_handle: e.sourceHandle ?? undefined,\n        target_handle: e.targetHandle ?? undefined,\n      }))\n\n      await api.saveGraph(pipelineId, { nodes: graphNodes, edges: graphEdges })\n      toastSuccess('Pipeline saved')\n    } catch (e: any) {\n      toastError(e.message || 'Failed to save pipeline')\n    } finally {\n      saving = false\n    }\n  }\n\n  async function handleStart() {\n    try {\n      // Save first, then start\n      await handleSave()\n      await api.startPipeline(pipelineId)\n      toastSuccess('Pipeline started')\n      await loadPipeline()\n    } catch (e: any) {\n      toastError(e.message || 'Failed to start pipeline')\n    }\n  }\n\n  async function handleStop() {\n    try {\n      await api.stopPipeline(pipelineId)\n      toastSuccess('Pipeline stopped')\n      await loadPipeline()\n    } catch (e: any) {\n      toastError(e.message || 'Failed to stop pipeline')\n    }\n  }\n\n  function handleConnect(connection: Connection) {\n    const newEdge: Edge = {\n      id: `edge-${Date.now()}`,\n      source: connection.source!,\n      target: connection.target!,\n      sourceHandle: 
connection.sourceHandle ?? undefined,\n      targetHandle: connection.targetHandle ?? undefined,\n      animated: true,\n    }\n    edges = [...edges, newEdge]\n  }\n\n  function handleNodeClick(nodeId: string) {\n    selectedNodeId = nodeId\n  }\n\n  function handlePaneClick() {\n    selectedNodeId = null\n  }\n\n  function handleNodeConfigUpdate(nodeId: string, config: Record<string, unknown>, label: string) {\n    nodes = nodes.map((n) => {\n      if (n.id !== nodeId) return n\n      return {\n        ...n,\n        data: {\n          ...n.data,\n          config,\n          label,\n        },\n      }\n    })\n  }\n</script>\n\n<div class=\"flex flex-col h-full\">\n  {#if loading}\n    <div class=\"flex items-center justify-center h-full\">\n      <Spinner />\n    </div>\n  {:else if pipeline}\n    <PipelineToolbar\n      pipelineName={pipeline.name}\n      status={pipeline.status as PipelineStatus}\n      {saving}\n      {onBack}\n      onSave={handleSave}\n      onStart={handleStart}\n      onStop={handleStop}\n    />\n\n    <div class=\"flex flex-1 min-h-0\">\n      <PipelineCanvas\n        {nodes}\n        {edges}\n        onNodesChange={(updated) => { nodes = updated }}\n        onEdgesChange={(updated) => { edges = updated }}\n        onConnect={handleConnect}\n        onNodeClick={handleNodeClick}\n        onPaneClick={handlePaneClick}\n      />\n\n      {#if selectedNode}\n        <NodeConfigPanel\n          nodeId={selectedNode.id}\n          nodeType={selectedNode.data.node_type as NodeType}\n          label={selectedNode.data.label as string}\n          config={(selectedNode.data.config || {}) as Record<string, unknown>}\n          {pipelineId}\n          onUpdate={handleNodeConfigUpdate}\n          onClose={() => { selectedNodeId = null }}\n        />\n      {/if}\n    </div>\n\n    <PipelineStatusBar\n      pipelineId={pipelineId}\n      status={pipeline.status as PipelineStatus}\n      onStatusChange={(newStatus) => {\n        if (pipeline) 
pipeline = { ...pipeline, status: newStatus }\n      }}\n    />\n  {:else}\n    <div class=\"flex items-center justify-center h-full text-gray-500\">\n      Pipeline not found\n    </div>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/PipelineList.svelte",
    "content": "<script lang=\"ts\">\n  import type { Pipeline } from '../../types/pipelines'\n  import Button from '../common/Button.svelte'\n  import ConfirmDialog from '../common/ConfirmDialog.svelte'\n  import Spinner from '../common/Spinner.svelte'\n  import {\n    Plus,\n    Trash2,\n    Play,\n    Square,\n    Pencil,\n    Workflow,\n    AlertCircle,\n    Clock,\n    Radio,\n  } from 'lucide-svelte'\n\n  interface Props {\n    pipelines: Pipeline[]\n    loading: boolean\n    onCreate: () => void\n    onSelect: (id: string) => void\n    onDelete: (id: string) => void\n    onStart: (id: string) => void\n    onStop: (id: string) => void\n  }\n\n  let { pipelines, loading, onCreate, onSelect, onDelete, onStart, onStop }: Props = $props()\n\n  let confirmDelete = $state<Pipeline | null>(null)\n  let search = $state('')\n\n  const filtered = $derived(\n    search.trim()\n      ? pipelines.filter((p) => p.name.toLowerCase().includes(search.toLowerCase()))\n      : pipelines,\n  )\n\n  function statusColor(status: string): string {\n    switch (status) {\n      case 'running':\n        return 'bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400'\n      case 'error':\n        return 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400'\n      case 'starting':\n      case 'stopping':\n        return 'bg-yellow-100 text-yellow-700 dark:bg-yellow-900/30 dark:text-yellow-400'\n      case 'draft':\n        return 'bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400'\n      default:\n        return 'bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400'\n    }\n  }\n\n  function statusIcon(status: string) {\n    switch (status) {\n      case 'running':\n        return Radio\n      case 'error':\n        return AlertCircle\n      case 'starting':\n      case 'stopping':\n        return Clock\n      default:\n        return Workflow\n    }\n  }\n\n  function formatDate(date: string): string {\n    return new 
Date(date).toLocaleDateString(undefined, {\n      month: 'short',\n      day: 'numeric',\n      hour: '2-digit',\n      minute: '2-digit',\n    })\n  }\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <!-- Header -->\n  <div class=\"flex items-center justify-between px-4 py-3 border-b border-gray-200 dark:border-gray-800\">\n    <div class=\"flex items-center gap-3\">\n      <Workflow size={18} class=\"text-ch-blue\" />\n      <h1 class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">Pipelines</h1>\n      <span class=\"text-xs text-gray-400 dark:text-gray-600\">{pipelines.length} total</span>\n    </div>\n    <div class=\"flex items-center gap-2\">\n      <input\n        bind:value={search}\n        type=\"text\"\n        placeholder=\"Search pipelines...\"\n        class=\"h-8 w-52 rounded-lg border border-gray-300 dark:border-gray-700 bg-white dark:bg-gray-800 px-3 text-xs text-gray-900 dark:text-gray-100 focus:outline-none focus:ring-2 focus:ring-orange-500\"\n      />\n      <Button size=\"sm\" onclick={onCreate}>\n        <Plus size={14} /> New Pipeline\n      </Button>\n    </div>\n  </div>\n\n  <!-- Content -->\n  <div class=\"flex-1 overflow-auto p-4\">\n    {#if loading}\n      <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n    {:else if filtered.length === 0}\n      <div class=\"text-center py-12 text-gray-500\">\n        <Workflow size={36} class=\"mx-auto mb-2 text-gray-300 dark:text-gray-700\" />\n        {#if search.trim()}\n          <p class=\"mb-1\">No pipelines match \"{search}\"</p>\n        {:else}\n          <p class=\"mb-1\">No pipelines yet</p>\n          <p class=\"text-xs text-gray-400 dark:text-gray-600\">Create a pipeline to start ingesting data into ClickHouse</p>\n        {/if}\n      </div>\n    {:else}\n      <div class=\"grid gap-3 grid-cols-1 md:grid-cols-2 xl:grid-cols-3\">\n        {#each filtered as pipeline (pipeline.id)}\n          <div\n            class=\"group text-left rounded-xl 
border border-gray-200 dark:border-gray-800 bg-white dark:bg-gray-900 p-4 hover:border-orange-300 dark:hover:border-orange-700 hover:shadow-sm transition-all cursor-pointer\"\n            onclick={() => onSelect(pipeline.id)}\n            onkeydown={(e) => { if (e.key === 'Enter') onSelect(pipeline.id) }}\n            role=\"button\"\n            tabindex={0}\n          >\n            <div class=\"flex items-start justify-between mb-3\">\n              <div class=\"flex-1 min-w-0\">\n                <h3 class=\"font-medium text-sm text-gray-900 dark:text-gray-100 truncate\">{pipeline.name}</h3>\n                {#if pipeline.description}\n                  <p class=\"text-xs text-gray-500 dark:text-gray-400 truncate mt-0.5\">{pipeline.description}</p>\n                {/if}\n              </div>\n              <span class=\"inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[10px] font-medium {statusColor(pipeline.status)}\">\n                {#if pipeline.status === 'running'}\n                  <Radio size={10} />\n                {:else if pipeline.status === 'error'}\n                  <AlertCircle size={10} />\n                {:else if pipeline.status === 'starting' || pipeline.status === 'stopping'}\n                  <Clock size={10} />\n                {:else}\n                  <Workflow size={10} />\n                {/if}\n                {pipeline.status}\n              </span>\n            </div>\n\n            <div class=\"flex items-center justify-between\">\n              <span class=\"text-[10px] text-gray-400 dark:text-gray-600\">\n                Updated {formatDate(pipeline.updated_at)}\n              </span>\n              <!-- svelte-ignore a11y_click_events_have_key_events a11y_no_static_element_interactions -->\n              <div class=\"flex items-center gap-1 opacity-0 group-hover:opacity-100 transition-opacity\" onclick={(e: MouseEvent) => e.stopPropagation()}>\n                {#if pipeline.status === 'running' || 
pipeline.status === 'starting'}\n                  <button\n                    class=\"p-1 rounded hover:bg-red-100 dark:hover:bg-red-900/30 text-red-500\"\n                    title=\"Stop pipeline\"\n                    onclick={() => onStop(pipeline.id)}\n                  >\n                    <Square size={14} />\n                  </button>\n                {:else}\n                  <button\n                    class=\"p-1 rounded hover:bg-green-100 dark:hover:bg-green-900/30 text-green-500\"\n                    title=\"Start pipeline\"\n                    onclick={() => onStart(pipeline.id)}\n                  >\n                    <Play size={14} />\n                  </button>\n                {/if}\n                <button\n                  class=\"p-1 rounded hover:bg-red-100 dark:hover:bg-red-900/30 text-gray-400 hover:text-red-500\"\n                  title=\"Delete pipeline\"\n                  onclick={() => { confirmDelete = pipeline }}\n                >\n                  <Trash2 size={14} />\n                </button>\n              </div>\n            </div>\n\n            {#if pipeline.last_error && pipeline.status === 'error'}\n              <div class=\"mt-2 px-2 py-1 rounded bg-red-50 dark:bg-red-900/20 text-[10px] text-red-600 dark:text-red-400 truncate\">\n                {pipeline.last_error}\n              </div>\n            {/if}\n          </div>\n        {/each}\n      </div>\n    {/if}\n  </div>\n</div>\n\n<ConfirmDialog\n  open={!!confirmDelete}\n  title=\"Delete pipeline?\"\n  description=\"This will permanently delete the pipeline and all its configuration. This cannot be undone.\"\n  confirmLabel=\"Delete\"\n  destructive={true}\n  onconfirm={() => {\n    if (confirmDelete) {\n      onDelete(confirmDelete.id)\n      confirmDelete = null\n    }\n  }}\n  oncancel={() => { confirmDelete = null }}\n/>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/PipelineStatusBar.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { PipelineStatus } from '../../types/pipelines'\n  import * as api from '../../api/pipelines'\n  import { Activity, Rows3, HardDrive, AlertTriangle, Timer } from 'lucide-svelte'\n\n  interface Props {\n    pipelineId: string\n    status: PipelineStatus\n    onStatusChange?: (status: PipelineStatus) => void\n  }\n\n  let { pipelineId, status, onStatusChange }: Props = $props()\n\n  let rowsIngested = $state(0)\n  let bytesIngested = $state(0)\n  let batchesSent = $state(0)\n  let errorsCount = $state(0)\n  let pollTimer = $state<ReturnType<typeof setInterval> | null>(null)\n\n  const isRunning = $derived(status === 'running' || status === 'starting')\n\n  onMount(() => {\n    if (isRunning) {\n      startPolling()\n    }\n    return () => {\n      if (pollTimer) clearInterval(pollTimer)\n    }\n  })\n\n  $effect(() => {\n    if (isRunning && !pollTimer) {\n      startPolling()\n    } else if (!isRunning && pollTimer) {\n      clearInterval(pollTimer)\n      pollTimer = null\n    }\n  })\n\n  function startPolling() {\n    fetchStatus()\n    pollTimer = setInterval(fetchStatus, 5000)\n  }\n\n  async function fetchStatus() {\n    try {\n      const res = await api.getPipelineStatus(pipelineId)\n      rowsIngested = res.rows_ingested ?? 0\n      bytesIngested = res.bytes_ingested ?? 0\n      batchesSent = res.batches_sent ?? 0\n      errorsCount = res.errors_count ?? 0\n\n      if (res.status !== status && onStatusChange) {\n        onStatusChange(res.status as PipelineStatus)\n      }\n    } catch {\n      // Silently handle polling errors\n    }\n  }\n\n  function formatBytes(bytes: number): string {\n    if (bytes === 0) return '0 B'\n    const units = ['B', 'KB', 'MB', 'GB', 'TB']\n    const i = Math.floor(Math.log(bytes) / Math.log(1024))\n    return `${(bytes / Math.pow(1024, i)).toFixed(i > 0 ? 
1 : 0)} ${units[i]}`\n  }\n\n  function formatNumber(n: number): string {\n    if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1)}M`\n    if (n >= 1_000) return `${(n / 1_000).toFixed(1)}K`\n    return n.toString()\n  }\n</script>\n\n{#if isRunning || rowsIngested > 0}\n  <div class=\"flex items-center gap-4 px-4 py-1.5 border-t border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-950 text-[11px] text-gray-500 dark:text-gray-400\">\n    {#if isRunning}\n      <span class=\"flex items-center gap-1 text-green-500\">\n        <Activity size={12} class=\"animate-pulse\" />\n        Running\n      </span>\n    {/if}\n\n    <span class=\"flex items-center gap-1\" title=\"Rows ingested\">\n      <Rows3 size={12} />\n      {formatNumber(rowsIngested)} rows\n    </span>\n\n    <span class=\"flex items-center gap-1\" title=\"Bytes ingested\">\n      <HardDrive size={12} />\n      {formatBytes(bytesIngested)}\n    </span>\n\n    <span class=\"flex items-center gap-1\" title=\"Batches sent\">\n      <Timer size={12} />\n      {formatNumber(batchesSent)} batches\n    </span>\n\n    {#if errorsCount > 0}\n      <span class=\"flex items-center gap-1 text-red-500\" title=\"Errors\">\n        <AlertTriangle size={12} />\n        {formatNumber(errorsCount)} errors\n      </span>\n    {/if}\n  </div>\n{/if}\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/PipelineToolbar.svelte",
    "content": "<script lang=\"ts\">\n  import type { PipelineStatus } from '../../types/pipelines'\n  import Button from '../common/Button.svelte'\n  import { ArrowLeft, Save, Play, Square, RotateCcw } from 'lucide-svelte'\n\n  interface Props {\n    pipelineName: string\n    status: PipelineStatus\n    saving: boolean\n    onBack: () => void\n    onSave: () => void\n    onStart: () => void\n    onStop: () => void\n  }\n\n  let { pipelineName, status, saving, onBack, onSave, onStart, onStop }: Props = $props()\n\n  const isRunning = $derived(status === 'running' || status === 'starting')\n</script>\n\n<div class=\"flex items-center justify-between px-3 py-2 border-b border-gray-200 dark:border-gray-800 bg-white dark:bg-gray-950\">\n  <div class=\"flex items-center gap-2\">\n    <button\n      class=\"p-1.5 rounded-lg hover:bg-gray-100 dark:hover:bg-gray-800 text-gray-500 dark:text-gray-400\"\n      onclick={onBack}\n      title=\"Back to pipelines\"\n    >\n      <ArrowLeft size={16} />\n    </button>\n    <h2 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100 truncate max-w-[300px]\">\n      {pipelineName}\n    </h2>\n    <span class=\"text-[10px] px-1.5 py-0.5 rounded-full font-medium {\n      status === 'running' ? 'bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400' :\n      status === 'error' ? 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400' :\n      status === 'draft' ? 
'bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400' :\n      'bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400'\n    }\">{status}</span>\n  </div>\n\n  <div class=\"flex items-center gap-2\">\n    <Button size=\"sm\" variant=\"secondary\" onclick={onSave} loading={saving}>\n      <Save size={14} /> Save\n    </Button>\n    {#if isRunning}\n      <Button size=\"sm\" variant=\"danger\" onclick={onStop}>\n        <Square size={14} /> Stop\n      </Button>\n    {:else}\n      <Button size=\"sm\" onclick={onStart}>\n        <Play size={14} /> Run\n      </Button>\n    {/if}\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/nodes/SinkNode.svelte",
    "content": "<script lang=\"ts\">\n  import { Handle, Position } from '@xyflow/svelte'\n  import { Database } from 'lucide-svelte'\n\n  interface Props {\n    data: {\n      label: string\n      node_type: string\n      config?: Record<string, unknown>\n    }\n  }\n\n  let { data }: Props = $props()\n</script>\n\n<div class=\"rounded-lg border-2 border-orange-400 dark:border-orange-600 bg-orange-50 dark:bg-orange-900/20 shadow-sm min-w-[160px]\">\n  <div class=\"flex items-center gap-2 px-3 py-2\">\n    <Database size={16} class=\"text-orange-600 dark:text-orange-400 shrink-0\" />\n    <div class=\"min-w-0\">\n      <div class=\"text-xs font-medium text-gray-800 dark:text-gray-200 truncate\">{data.label}</div>\n      <div class=\"text-[10px] text-gray-500 dark:text-gray-400\">ClickHouse</div>\n    </div>\n  </div>\n  <Handle type=\"target\" position={Position.Left} class=\"!bg-orange-500 !w-3 !h-3 !border-2 !border-white dark:!border-gray-900\" />\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/pipelines/nodes/SourceNode.svelte",
    "content": "<script lang=\"ts\">\n  import { Handle, Position } from '@xyflow/svelte'\n  import { Radio, Webhook, Database, HardDrive } from 'lucide-svelte'\n\n  interface Props {\n    data: {\n      label: string\n      node_type: string\n      config?: Record<string, unknown>\n    }\n  }\n\n  let { data }: Props = $props()\n\n  const iconMap: Record<string, typeof Radio> = {\n    source_kafka: Radio,\n    source_webhook: Webhook,\n    source_database: Database,\n    source_s3: HardDrive,\n  }\n\n  const colorMap: Record<string, string> = {\n    source_kafka: 'border-purple-400 dark:border-purple-600',\n    source_webhook: 'border-blue-400 dark:border-blue-600',\n    source_database: 'border-emerald-400 dark:border-emerald-600',\n    source_s3: 'border-amber-400 dark:border-amber-600',\n  }\n\n  const bgMap: Record<string, string> = {\n    source_kafka: 'bg-purple-50 dark:bg-purple-900/20',\n    source_webhook: 'bg-blue-50 dark:bg-blue-900/20',\n    source_database: 'bg-emerald-50 dark:bg-emerald-900/20',\n    source_s3: 'bg-amber-50 dark:bg-amber-900/20',\n  }\n\n  const Icon = $derived(iconMap[data.node_type] || Radio)\n  const borderColor = $derived(colorMap[data.node_type] || 'border-gray-300 dark:border-gray-700')\n  const bgColor = $derived(bgMap[data.node_type] || 'bg-gray-50 dark:bg-gray-900')\n</script>\n\n<div class=\"rounded-lg border-2 {borderColor} {bgColor} shadow-sm min-w-[160px]\">\n  <div class=\"flex items-center gap-2 px-3 py-2\">\n    <Icon size={16} class=\"text-gray-600 dark:text-gray-300 shrink-0\" />\n    <div class=\"min-w-0\">\n      <div class=\"text-xs font-medium text-gray-800 dark:text-gray-200 truncate\">{data.label}</div>\n      <div class=\"text-[10px] text-gray-500 dark:text-gray-400\">{data.node_type.replace('source_', '')}</div>\n    </div>\n  </div>\n  <Handle type=\"source\" position={Position.Right} class=\"!bg-orange-500 !w-3 !h-3 !border-2 !border-white dark:!border-gray-900\" />\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/table/Pagination.svelte",
    "content": "<script lang=\"ts\">\n  import { ChevronLeft, ChevronRight } from 'lucide-svelte'\n  import { formatNumber } from '../../utils/format'\n\n  interface Props {\n    page: number\n    pageSize: number\n    totalRows: number\n    onchange: (page: number) => void\n  }\n\n  let { page, pageSize, totalRows, onchange }: Props = $props()\n\n  const totalPages = $derived(Math.max(1, Math.ceil(totalRows / pageSize)))\n  const from = $derived(page * pageSize + 1)\n  const to = $derived(Math.min((page + 1) * pageSize, totalRows))\n</script>\n\n<div class=\"flex items-center justify-between px-3 py-2 border-t border-gray-200 dark:border-gray-800 bg-gray-100/50 dark:bg-gray-900/50 text-xs text-gray-500 dark:text-gray-400\">\n  <span>\n    {formatNumber(from)}-{formatNumber(to)} of {formatNumber(totalRows)} rows\n  </span>\n\n  <div class=\"flex items-center gap-1\">\n    <button\n      class=\"p-1 rounded hover:bg-gray-200 dark:hover:bg-gray-800 disabled:opacity-30 disabled:cursor-not-allowed\"\n      disabled={page === 0}\n      onclick={() => onchange(page - 1)}\n    >\n      <ChevronLeft size={14} />\n    </button>\n    <span class=\"px-2\">\n      Page {page + 1} of {totalPages}\n    </span>\n    <button\n      class=\"p-1 rounded hover:bg-gray-200 dark:hover:bg-gray-800 disabled:opacity-30 disabled:cursor-not-allowed\"\n      disabled={page >= totalPages - 1}\n      onclick={() => onchange(page + 1)}\n    >\n      <ChevronRight size={14} />\n    </button>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/lib/components/table/TableCell.svelte",
    "content": "<script lang=\"ts\">\n  import { getDisplayType } from '../../utils/ch-types'\n  import { getFormatNumbers } from '../../stores/number-format.svelte'\n\n  interface Props {\n    value: unknown\n    type: string\n    width: number\n  }\n\n  let { value, type, width }: Props = $props()\n\n  const displayType = $derived(getDisplayType(type))\n\n  const rawValue = $derived.by(() => {\n    if (value === null || value === undefined) return 'NULL'\n    if (typeof value === 'string') return value\n    if (typeof value === 'object') return JSON.stringify(value)\n    return String(value)\n  })\n\n  const formatted = $derived.by(() => {\n    if (value === null || value === undefined) return null\n    if (displayType === 'number') {\n      if (typeof value === 'number') {\n        return getFormatNumbers() ? value.toLocaleString() : String(value)\n      }\n      // Large integers arrive as strings to preserve precision (> Number.MAX_SAFE_INTEGER)\n      if (typeof value === 'string' && /^-?\\d+$/.test(value)) {\n        return getFormatNumbers() ? BigInt(value).toLocaleString() : value\n      }\n    }\n    if (displayType === 'json' && typeof value === 'object') {\n      return JSON.stringify(value)\n    }\n    return String(value)\n  })\n\n  const isNull = $derived(value === null || value === undefined)\n  const isBigIntString = $derived(displayType === 'number' && typeof value === 'string' && /^-?\\d+$/.test(value as string))\n  const align = $derived(displayType === 'number' || isBigIntString ? 
'text-right' : 'text-left')\n  const isUrl = $derived(displayType === 'string' && typeof value === 'string' && /^https?:\\/\\//i.test(value))\n\n  async function handleCopyCell() {\n    if (typeof navigator === 'undefined' || isNull) return\n    try {\n      await navigator.clipboard.writeText(rawValue)\n    } catch {\n      // Clipboard failures are non-fatal and should not interrupt navigation.\n    }\n  }\n</script>\n\n<td\n  class=\"px-2.5 truncate border-r border-gray-200/60 dark:border-gray-800/60 {align}\"\n  style=\"width:{width}px;max-width:{width}px;min-width:{width}px\"\n  title={isNull ? 'NULL' : `${rawValue}\\n\\nDouble-click to copy`}\n  ondblclick={handleCopyCell}\n>\n  {#if isNull}\n    <span class=\"inline-flex items-center rounded-sm px-1 py-0.5 text-[10px] font-medium uppercase tracking-wide bg-gray-200/80 dark:bg-gray-800 text-gray-500 dark:text-gray-400\">Null</span>\n  {:else if displayType === 'bool'}\n    <span class=\"inline-flex items-center rounded-sm px-1.5 py-0.5 text-[10px] font-semibold uppercase tracking-wide {value ? 'text-emerald-700 dark:text-emerald-300 bg-emerald-100/70 dark:bg-emerald-500/12' : 'text-rose-700 dark:text-rose-300 bg-rose-100/70 dark:bg-rose-500/12'}\">\n      {String(value)}\n    </span>\n  {:else if isUrl}\n    <span class=\"font-mono text-[12px] text-orange-700 dark:text-orange-300\">{formatted}</span>\n  {:else if displayType === 'json'}\n    <span class=\"font-mono text-xs text-gray-500 dark:text-gray-400\">{formatted}</span>\n  {:else if displayType === 'number' || displayType === 'date'}\n    <span class=\"font-mono tabular-nums text-[12px] {displayType === 'number' ? 'text-gray-800 dark:text-gray-200' : 'text-gray-700 dark:text-gray-300'}\">{formatted}</span>\n  {:else}\n    <span class=\"text-[12px] text-gray-700 dark:text-gray-300\">{formatted}</span>\n  {/if}\n</td>\n"
  },
  {
    "path": "ui/src/lib/components/table/TableHeader.svelte",
    "content": "<script lang=\"ts\">\n  import type { ColumnMeta } from '../../types/query'\n  import { getDisplayType, isRightAligned } from '../../utils/ch-types'\n  import { ArrowUp, ArrowDown } from 'lucide-svelte'\n\n  interface Props {\n    columns: ColumnMeta[]\n    widths: number[]\n    sortColumn?: string\n    sortDir?: 'asc' | 'desc'\n    onsort?: (column: string) => void\n    onresize?: (index: number, width: number) => void\n    onfitcolumn?: (index: number) => void\n    onfitall?: () => void\n  }\n\n  let { columns, widths, sortColumn = '', sortDir = 'asc', onsort, onresize, onfitcolumn, onfitall }: Props = $props()\n\n  let resizing = $state<{ index: number; startX: number; startWidth: number } | null>(null)\n\n  function handleMouseDown(e: MouseEvent, index: number) {\n    e.preventDefault()\n    e.stopPropagation()\n    resizing = { index, startX: e.clientX, startWidth: widths[index] }\n    window.addEventListener('mousemove', handleMouseMove)\n    window.addEventListener('mouseup', handleMouseUp)\n  }\n\n  function handleMouseMove(e: MouseEvent) {\n    if (!resizing) return\n    const delta = e.clientX - resizing.startX\n    const newWidth = Math.max(50, resizing.startWidth + delta)\n    onresize?.(resizing.index, newWidth)\n  }\n\n  function handleMouseUp() {\n    resizing = null\n    window.removeEventListener('mousemove', handleMouseMove)\n    window.removeEventListener('mouseup', handleMouseUp)\n  }\n\n  function handleFitColumn(e: MouseEvent, index: number) {\n    e.preventDefault()\n    e.stopPropagation()\n    onfitcolumn?.(index)\n  }\n\n  function compactTypeLabel(type: string): string {\n    const normalized = type\n      .replace(/^Nullable\\((.+)\\)$/, '$1')\n      .replace(/^LowCardinality\\((.+)\\)$/, '$1')\n      .replace(/\\s+/g, '')\n\n    if (normalized.length <= 16) return normalized\n    return `${normalized.slice(0, 15)}…`\n  }\n\n  function typeTone(type: string): string {\n    switch (getDisplayType(type)) {\n      case 
'number':\n        return 'text-orange-700 dark:text-orange-300 bg-orange-100/70 dark:bg-orange-500/15 border-orange-200/70 dark:border-orange-500/25'\n      case 'date':\n        return 'text-emerald-700 dark:text-emerald-300 bg-emerald-100/70 dark:bg-emerald-500/15 border-emerald-200/70 dark:border-emerald-500/25'\n      case 'bool':\n        return 'text-fuchsia-700 dark:text-fuchsia-300 bg-fuchsia-100/70 dark:bg-fuchsia-500/12 border-fuchsia-200/70 dark:border-fuchsia-500/20'\n      case 'json':\n        return 'text-sky-700 dark:text-sky-300 bg-sky-100/70 dark:bg-sky-500/12 border-sky-200/70 dark:border-sky-500/20'\n      default:\n        return 'text-gray-600 dark:text-gray-300 bg-gray-100/80 dark:bg-gray-800/80 border-gray-200/75 dark:border-gray-700/75'\n    }\n  }\n</script>\n\n<thead class=\"sticky top-0 z-10 bg-gray-100 dark:bg-gray-900\">\n  <tr class=\"border-b border-gray-300 dark:border-gray-700\">\n    <!-- Row number header -->\n    <th\n      class=\"sticky left-0 z-20 px-2.5 py-2 text-xs font-semibold text-gray-700 dark:text-gray-200 border-r border-gray-200 dark:border-gray-800 text-center select-none bg-gray-100 dark:bg-gray-900\"\n      style=\"width:60px;max-width:60px;min-width:60px\"\n      ondblclick={() => onfitall?.()}\n      title=\"Double-click to auto-fit all columns\"\n    >#</th>\n    {#each columns as col, i}\n      <th\n        class=\"px-2.5 py-2 text-xs font-medium text-gray-500 dark:text-gray-400 border-r border-gray-200/60 dark:border-gray-800/60 select-none relative group bg-gray-100 dark:bg-gray-900\n          {isRightAligned(col.type) ? 'text-right' : 'text-left'}\"\n        style=\"width:{widths[i]}px;max-width:{widths[i]}px;min-width:{widths[i]}px\"\n      >\n        {#if onsort}\n          <button\n            type=\"button\"\n            class=\"w-full flex items-center gap-2 min-w-0 {isRightAligned(col.type) ? 
'justify-end' : ''} hover:text-gray-800 dark:hover:text-gray-200\"\n            onclick={() => onsort?.(col.name)}\n          >\n            <span class=\"truncate font-semibold text-[11px]\" title={col.name}>{col.name}</span>\n            <span class=\"inline-flex shrink-0 items-center rounded px-1.5 py-0.5 text-[10px] font-semibold border {typeTone(col.type)}\">\n              {compactTypeLabel(col.type)}\n            </span>\n            {#if sortColumn === col.name}\n              {#if sortDir === 'asc'}\n                <ArrowUp size={12} class=\"shrink-0\" />\n              {:else}\n                <ArrowDown size={12} class=\"shrink-0\" />\n              {/if}\n            {/if}\n          </button>\n        {:else}\n          <div class=\"w-full flex items-center gap-2 min-w-0 {isRightAligned(col.type) ? 'justify-end' : ''}\">\n            <span class=\"truncate font-semibold text-[11px]\" title={col.name}>{col.name}</span>\n            <span class=\"inline-flex shrink-0 items-center rounded px-1.5 py-0.5 text-[10px] font-semibold border {typeTone(col.type)}\">\n              {compactTypeLabel(col.type)}\n            </span>\n          </div>\n        {/if}\n\n        <!-- Resize handle -->\n        {#if onresize}\n          <button\n            type=\"button\"\n            class=\"absolute right-0 top-0 bottom-0 w-2.5 cursor-col-resize opacity-0 group-hover:opacity-100 hover:bg-ch-orange/35 active:bg-ch-orange/45 transition-opacity\"\n            onmousedown={(e) => handleMouseDown(e, i)}\n            ondblclick={(e) => handleFitColumn(e, i)}\n            onclick={(e) => e.stopPropagation()}\n            title=\"Drag to resize, double-click to auto-fit\"\n            aria-label={`Resize ${col.name} column`}\n          ></button>\n        {/if}\n      </th>\n    {/each}\n  </tr>\n</thead>\n"
  },
  {
    "path": "ui/src/lib/components/table/VirtualTable.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount, untrack } from 'svelte'\n  import type { ColumnMeta } from '../../types/query'\n  import TableHeader from './TableHeader.svelte'\n  import TableCell from './TableCell.svelte'\n  import { SearchX } from 'lucide-svelte'\n  import { getFormatNumbers } from '../../stores/number-format.svelte'\n\n  const ROW_HEIGHT = 34\n  const OVERSCAN = 5\n  const ROW_NUMBER_WIDTH = 60\n  const MIN_COL_WIDTH = 120\n  const MAX_COL_WIDTH = 720\n  const SAMPLE_ROWS = 160\n\n  interface Props {\n    meta: ColumnMeta[]\n    data: unknown[][]\n    totalRows?: number\n    sortColumn?: string\n    sortDir?: 'asc' | 'desc'\n    onsort?: (column: string) => void\n  }\n\n  let { meta, data, totalRows, sortColumn = '', sortDir = 'asc', onsort }: Props = $props()\n\n  let container: HTMLDivElement\n  let scrollTop = $state(0)\n  let viewportHeight = $state(600)\n  let viewportWidth = $state(900)\n  let widths = $state<number[]>([])\n  let baseWidths = $state<number[]>([])\n  let manualTouched = $state<boolean[]>([])\n  let selectedRow = $state<number | null>(null)\n\n  function estimateTextWidth(text: string): number {\n    return Math.max(0, Math.ceil(text.length * 7.4))\n  }\n\n  function estimateValueWidth(value: unknown): number {\n    if (value === null || value === undefined) return 34\n    if (typeof value === 'number') {\n      const display = getFormatNumbers() ? value.toLocaleString() : String(value)\n      return Math.max(60, estimateTextWidth(display) + 18)\n    }\n    if (typeof value === 'boolean') return 58\n    if (typeof value === 'string') {\n      const str = value.length > 80 ? 
value.slice(0, 80) : value\n      return Math.max(74, estimateTextWidth(str) + 18)\n    }\n    return Math.max(90, estimateTextWidth(JSON.stringify(value).slice(0, 80)) + 20)\n  }\n\n  function compactTypeLabel(type: string): string {\n    const normalized = type\n      .replace(/^Nullable\\((.+)\\)$/, '$1')\n      .replace(/^LowCardinality\\((.+)\\)$/, '$1')\n      .replace(/\\s+/g, '')\n\n    if (normalized.length <= 16) return normalized\n    return `${normalized.slice(0, 15)}…`\n  }\n\n  function sampleRows(rows: unknown[][]): unknown[][] {\n    if (rows.length <= SAMPLE_ROWS) return rows\n    const sampled: unknown[][] = []\n    const step = rows.length / SAMPLE_ROWS\n    for (let i = 0; i < SAMPLE_ROWS; i++) {\n      const idx = Math.floor(i * step)\n      sampled.push(rows[idx] ?? rows[rows.length - 1])\n    }\n    return sampled\n  }\n\n  function buildBaseWidths(columns: ColumnMeta[], rows: unknown[][]): number[] {\n    if (!columns.length) return []\n    const sampled = sampleRows(rows)\n\n    return columns.map((col, ci) => {\n      const metricWidths: number[] = []\n      const typeLabel = compactTypeLabel(col.type)\n      const headerMinWidth = estimateTextWidth(col.name) + Math.min(estimateTextWidth(typeLabel), 124) + 62\n      let width = Math.max(\n        estimateTextWidth(col.name) + 40,\n        estimateTextWidth(col.type) + 28,\n        headerMinWidth,\n      )\n\n      for (let ri = 0; ri < sampled.length; ri++) {\n        const valueWidth = estimateValueWidth(sampled[ri]?.[ci])\n        metricWidths.push(valueWidth)\n        width = Math.max(width, valueWidth)\n      }\n\n      if (metricWidths.length > 0) {\n        const sorted = [...metricWidths].sort((a, b) => a - b)\n        const p90 = sorted[Math.floor((sorted.length - 1) * 0.9)] ?? 
width\n        width = Math.max(width, Math.round(p90))\n      }\n\n      if (/^Date(Time)?/.test(col.type)) width = Math.max(width, 140)\n      if (/^(U?Int|Float|Decimal)/.test(col.type)) width = Math.max(width, 124)\n      if (/UUID|FixedString/.test(col.type)) width = Math.max(width, 180)\n\n      return Math.max(MIN_COL_WIDTH, Math.min(MAX_COL_WIDTH, Math.round(width)))\n    })\n  }\n\n  function distributeToViewport(source: number[], touched: boolean[], viewport: number): number[] {\n    if (!source.length) return []\n    const available = Math.max(0, viewport - ROW_NUMBER_WIDTH)\n    const total = source.reduce((sum, w) => sum + w, 0)\n    if (available <= 0 || total >= available) return source\n\n    const extra = available - total\n    const autoIndices = source.map((_, i) => i).filter((i) => !touched[i])\n    const targets = autoIndices.length > 0 ? autoIndices : source.map((_, i) => i)\n    const targetSet = new Set(targets)\n    const weightSum = targets.reduce((sum, i) => sum + Math.max(1, Math.sqrt(source[i])), 0)\n\n    let consumed = 0\n    const grown = source.map((w, i) => {\n      if (!targetSet.has(i)) return w\n      const gain = Math.floor((extra * Math.max(1, Math.sqrt(source[i]))) / weightSum)\n      consumed += gain\n      return w + gain\n    })\n\n    // Allocate remaining pixels so width matches viewport exactly.\n    let remainder = extra - consumed\n    for (let i = 0; i < grown.length && remainder > 0; i++) {\n      if (targetSet.has(i)) {\n        grown[i] += 1\n        remainder--\n      }\n    }\n\n    return grown\n  }\n\n  function syncViewport() {\n    if (!container) return\n    viewportHeight = container.clientHeight\n    viewportWidth = container.clientWidth\n  }\n\n  // Initialize and keep widths in sync with new result sets.\n  $effect(() => {\n    const columns = meta\n    const rows = data\n    const _fmt = getFormatNumbers()\n    if (!columns.length) {\n      widths = []\n      baseWidths = []\n      manualTouched = []\n 
     return\n    }\n\n    const nextBase = buildBaseWidths(columns, rows)\n    baseWidths = nextBase\n\n    const currentWidths = untrack(() => widths)\n    const currentTouched = untrack(() => manualTouched)\n\n    if (currentWidths.length !== columns.length || currentTouched.length !== columns.length) {\n      widths = [...nextBase]\n      manualTouched = Array.from({ length: columns.length }, () => false)\n      return\n    }\n\n    const nextWidths = currentWidths.map((w, i) => (currentTouched[i] ? w : nextBase[i]))\n    const changed = nextWidths.some((w, i) => w !== currentWidths[i])\n    if (changed) widths = nextWidths\n  })\n\n  // Keep viewport metrics fresh when result sets or panels change size.\n  $effect(() => {\n    meta\n    data\n    const raf = requestAnimationFrame(syncViewport)\n    return () => cancelAnimationFrame(raf)\n  })\n\n  const rowCount = $derived(data.length)\n  const totalHeight = $derived(rowCount * ROW_HEIGHT)\n  const startIdx = $derived(Math.max(0, Math.floor(scrollTop / ROW_HEIGHT) - OVERSCAN))\n  const endIdx = $derived(Math.min(rowCount, Math.ceil((scrollTop + viewportHeight) / ROW_HEIGHT) + OVERSCAN))\n  const visibleRows = $derived(data.slice(startIdx, endIdx))\n  const topPad = $derived(startIdx * ROW_HEIGHT)\n  const effectiveWidths = $derived(distributeToViewport(widths, manualTouched, viewportWidth))\n  const tableWidth = $derived(ROW_NUMBER_WIDTH + effectiveWidths.reduce((sum, w) => sum + w, 0))\n\n  function handleScroll() {\n    scrollTop = container.scrollTop\n  }\n\n  function handleResize(index: number, width: number) {\n    widths = widths.map((w, i) => i === index ? Math.max(MIN_COL_WIDTH, Math.min(MAX_COL_WIDTH, Math.round(width))) : w)\n    manualTouched = manualTouched.map((t, i) => i === index ? true : t)\n  }\n\n  function handleFitColumn(index: number) {\n    if (!baseWidths[index]) return\n    widths = widths.map((w, i) => i === index ? 
baseWidths[index] : w)\n    manualTouched = manualTouched.map((t, i) => i === index ? false : t)\n  }\n\n  function handleFitAll() {\n    widths = [...baseWidths]\n    manualTouched = manualTouched.map(() => false)\n  }\n\n  onMount(() => {\n    if (!container) return\n\n    syncViewport()\n\n    const observer = new ResizeObserver(() => syncViewport())\n    observer.observe(container)\n    window.addEventListener('resize', syncViewport)\n\n    return () => {\n      observer.disconnect()\n      window.removeEventListener('resize', syncViewport)\n    }\n  })\n</script>\n\n<div bind:this={container} class=\"relative flex-1 overflow-auto bg-white dark:bg-gray-950\" onscroll={handleScroll}>\n  <table class=\"text-sm border-collapse table-fixed\" style=\"width:{tableWidth}px;min-width:{tableWidth}px\">\n    <colgroup>\n      <col style=\"width:{ROW_NUMBER_WIDTH}px;min-width:{ROW_NUMBER_WIDTH}px;max-width:{ROW_NUMBER_WIDTH}px\" />\n      {#each effectiveWidths as width}\n        <col style=\"width:{width}px;min-width:{width}px;max-width:{width}px\" />\n      {/each}\n    </colgroup>\n    <TableHeader\n      columns={meta}\n      widths={effectiveWidths}\n      {sortColumn}\n      {sortDir}\n      {onsort}\n      onresize={handleResize}\n      onfitcolumn={handleFitColumn}\n      onfitall={handleFitAll}\n    />\n    {#if rowCount > 0}\n      <tbody>\n        <!-- Spacer for virtual scroll -->\n        <tr style=\"height:{topPad}px\" aria-hidden=\"true\"><td colspan={meta.length + 1}></td></tr>\n\n        {#each visibleRows as row, vi (startIdx + vi)}\n          {@const absIdx = startIdx + vi}\n          <tr\n            class=\"group h-[34px] border-b border-gray-100 dark:border-gray-900 hover:bg-orange-50/70 dark:hover:bg-ch-orange/8 cursor-default\n              {absIdx % 2 === 1 ? 'bg-gray-50 dark:bg-gray-900' : 'bg-white dark:bg-gray-950'}\n              {selectedRow === absIdx ? 
'bg-orange-100 dark:bg-orange-950' : ''}\"\n            onclick={() => selectedRow = absIdx}\n          >\n            <td\n              class=\"sticky left-0 z-[2] px-2.5 text-right text-xs font-semibold text-gray-700 dark:text-gray-200 border-r border-gray-200 dark:border-gray-800 tabular-nums select-none\n                {absIdx % 2 === 1 ? 'bg-gray-50 dark:bg-gray-900' : 'bg-white dark:bg-gray-950'}\n                {selectedRow === absIdx ? 'bg-orange-100 dark:bg-orange-950' : ''}\n                group-hover:bg-orange-50 dark:group-hover:bg-orange-900/60\"\n              style=\"width:{ROW_NUMBER_WIDTH}px;max-width:{ROW_NUMBER_WIDTH}px;min-width:{ROW_NUMBER_WIDTH}px\"\n            >{absIdx + 1}</td>\n            {#each meta as col, ci}\n              <TableCell\n                value={row[ci]}\n                type={col.type}\n                width={effectiveWidths[ci] ?? 120}\n              />\n            {/each}\n          </tr>\n        {/each}\n\n        <!-- Bottom spacer -->\n        <tr style=\"height:{Math.max(0, totalHeight - (endIdx * ROW_HEIGHT))}px\" aria-hidden=\"true\"><td colspan={meta.length + 1}></td></tr>\n      </tbody>\n    {/if}\n  </table>\n\n  {#if rowCount === 0 && meta.length > 0}\n    <div class=\"absolute inset-x-0 top-[35px] bottom-0 grid place-items-center p-6 pointer-events-none\">\n      <div class=\"max-w-md rounded-xl border border-gray-200/80 dark:border-gray-800/80 bg-gray-50/90 dark:bg-gray-900/65 px-6 py-5 text-center shadow-lg\">\n        <div class=\"mx-auto mb-3 inline-flex h-10 w-10 items-center justify-center rounded-full bg-gray-100 dark:bg-gray-800 text-gray-500 dark:text-gray-300\">\n          <SearchX size={18} />\n        </div>\n        <p class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">No rows returned</p>\n        <p class=\"mt-1 text-xs text-gray-500 dark:text-gray-400\">\n          Your query executed successfully, but nothing matched the current filters.\n        </p>\n      </div>\n    
</div>\n  {/if}\n</div>\n"
  },
  {
    "path": "ui/src/lib/editor/completions.ts",
    "content": "import type { CompletionContext, CompletionResult, Completion } from '@codemirror/autocomplete'\nimport { snippetCompletion } from '@codemirror/autocomplete'\nimport { fetchCompletions, listColumns, listTables } from '../api/query'\nimport { getDatabases, loadDatabases } from '../stores/schema.svelte'\nimport type { Column } from '../types/schema'\n\n// ── Cached server completions ───────────────────────────────────\n\nlet cachedFunctions: string[] | null = null\nlet cachedKeywords: string[] | null = null\nlet fetchPromise: Promise<void> | null = null\n\n// ── Cached schema metadata for autocomplete ─────────────────────\n\nlet dbFetchPromise: Promise<void> | null = null\nconst tableCache = new Map<string, string[]>()\nconst columnCache = new Map<string, Column[]>()\nconst tableFetches = new Map<string, Promise<void>>()\nconst columnFetches = new Map<string, Promise<void>>()\n\n// ── Model name cache for $ref() autocomplete ─────────────────────\n\nlet cachedModelNames: { name: string; materialization: string; target_database: string }[] | null = null\nlet modelFetchPromise: Promise<void> | null = null\n\nasync function ensureModelsLoaded(): Promise<void> {\n  if (cachedModelNames !== null) return\n  if (modelFetchPromise) { await modelFetchPromise; return }\n  modelFetchPromise = (async () => {\n    try {\n      const { listModels } = await import('../api/models')\n      const res = await listModels()\n      cachedModelNames = (res.models ?? []).map((m: { name: string; materialization: string; target_database: string }) => ({\n        name: m.name,\n        materialization: m.materialization,\n        target_database: m.target_database,\n      }))\n    } catch {\n      cachedModelNames = []\n    } finally {\n      modelFetchPromise = null\n    }\n  })()\n  await modelFetchPromise\n}\n\n/** Invalidate model name cache (call after create/delete/rename). 
*/\nexport function refreshModelCache(): void {\n  cachedModelNames = null\n}\n\nfunction detectRefContext(doc: string, pos: number): { inside: boolean; nameStart: number } {\n  const before = doc.slice(Math.max(0, pos - 100), pos)\n  const match = before.match(/\\$ref\\(\\s*([\\w]*)$/)\n  if (!match) return { inside: false, nameStart: pos }\n  return { inside: true, nameStart: pos - match[1].length }\n}\n\nfunction buildModelCompletions(): Completion[] {\n  return (cachedModelNames ?? []).map((m) => ({\n    label: m.name,\n    type: 'class',\n    detail: `${m.materialization} \\u2192 ${m.target_database}`,\n    boost: 50,\n  }))\n}\n\ntype SqlContext = 'table' | 'column' | 'dot' | 'function' | 'default'\n\ninterface TableRef {\n  db: string\n  table: string\n}\n\nfunction normalizeIdent(id: string): string {\n  return id.replace(/[`\"']/g, '').trim()\n}\n\nfunction parseTableRef(ref: string): TableRef | null {\n  const clean = normalizeIdent(ref)\n  if (!clean) return null\n  const parts = clean.split('.')\n  if (parts.length >= 2) {\n    return {\n      db: normalizeIdent(parts[0]),\n      table: normalizeIdent(parts[1]),\n    }\n  }\n  // Table without DB; resolve later using known databases.\n  return {\n    db: '',\n    table: normalizeIdent(parts[0]),\n  }\n}\n\nfunction tableKey(db: string, table: string): string {\n  return `${db}.${table}`\n}\n\nasync function ensureFunctionKeywordCache(): Promise<void> {\n  if (cachedFunctions !== null && cachedKeywords !== null) return\n  if (fetchPromise) {\n    await fetchPromise\n    return\n  }\n\n  fetchPromise = fetchCompletions()\n    .then(({ functions, keywords }) => {\n      cachedFunctions = functions\n      cachedKeywords = keywords\n    })\n    .catch(() => {\n      cachedFunctions = []\n      cachedKeywords = []\n    })\n\n  await fetchPromise\n}\n\nasync function ensureDatabasesLoaded(): Promise<void> {\n  if (getDatabases().length > 0) return\n  if (dbFetchPromise) {\n    await dbFetchPromise\n    
return\n  }\n  dbFetchPromise = loadDatabases().catch(() => {})\n  await dbFetchPromise\n}\n\nasync function ensureTablesCached(dbName: string): Promise<void> {\n  if (tableCache.has(dbName)) return\n\n  const dbInStore = getDatabases().find((d) => d.name === dbName)\n  if (dbInStore?.tables) {\n    tableCache.set(dbName, dbInStore.tables.map((t) => t.name))\n    return\n  }\n\n  const existing = tableFetches.get(dbName)\n  if (existing) {\n    await existing\n    return\n  }\n\n  const p = listTables(dbName)\n    .then((tables) => {\n      tableCache.set(dbName, tables)\n    })\n    .catch(() => {\n      tableCache.set(dbName, [])\n    })\n    .finally(() => {\n      tableFetches.delete(dbName)\n    })\n\n  tableFetches.set(dbName, p)\n  await p\n}\n\nasync function ensureColumnsCached(dbName: string, tableName: string): Promise<void> {\n  const key = tableKey(dbName, tableName)\n  if (columnCache.has(key)) return\n\n  const dbInStore = getDatabases().find((d) => d.name === dbName)\n  const tableInStore = dbInStore?.tables?.find((t) => t.name === tableName)\n  if (tableInStore?.columns) {\n    columnCache.set(key, tableInStore.columns)\n    return\n  }\n\n  const existing = columnFetches.get(key)\n  if (existing) {\n    await existing\n    return\n  }\n\n  const p = listColumns(dbName, tableName)\n    .then((cols) => {\n      columnCache.set(key, cols)\n    })\n    .catch(() => {\n      columnCache.set(key, [])\n    })\n    .finally(() => {\n      columnFetches.delete(key)\n    })\n\n  columnFetches.set(key, p)\n  await p\n}\n\nfunction detectContext(doc: string, pos: number): SqlContext {\n  const before = doc.slice(Math.max(0, pos - 1000), pos)\n\n  if (/([`\"\\w]+)\\.([`\"\\w]*)$/i.test(before)) return 'dot'\n\n  if (/\\b(?:FROM|JOIN|INTO|UPDATE|TABLE|DATABASE)\\s+[`\"\\w.]*$/i.test(before)) return 'table'\n\n  if (/\\b(?:SELECT|WHERE|ORDER\\s+BY|GROUP\\s+BY|HAVING|AND|OR|ON|USING|SET|WITH|BY)\\s+[`\"\\w.]*$/i.test(before)) {\n    return 'column'\n  }\n\n  if 
(/\\b\\w+\\(\\s*[\\w.`\"]*$/i.test(before)) return 'function'\n\n  return 'default'\n}\n\nfunction buildAliasMap(doc: string): Map<string, string> {\n  const aliases = new Map<string, string>()\n  const regex = /(?:FROM|JOIN)\\s+([`\"\\w]+(?:\\.[`\"\\w]+)?)(?:\\s+(?:AS\\s+)?([`\"\\w]+))?/gi\n  let match: RegExpExecArray | null\n\n  while ((match = regex.exec(doc)) !== null) {\n    const tableRef = normalizeIdent(match[1])\n    const alias = normalizeIdent(match[2] ?? '')\n    if (alias) aliases.set(alias, tableRef)\n  }\n\n  return aliases\n}\n\nfunction buildReferencedTables(doc: string): string[] {\n  const out = new Set<string>()\n  const regex = /(?:FROM|JOIN)\\s+([`\"\\w]+(?:\\.[`\"\\w]+)?)/gi\n  let match: RegExpExecArray | null\n  while ((match = regex.exec(doc)) !== null) {\n    const tableRef = normalizeIdent(match[1])\n    if (tableRef) out.add(tableRef)\n  }\n  return [...out]\n}\n\nfunction knownDatabases(): string[] {\n  const fromStore = getDatabases().map((d) => d.name)\n  return [...new Set([...fromStore, ...tableCache.keys()])]\n}\n\nfunction findTablesForDatabase(dbName: string): string[] {\n  const fromStore = getDatabases().find((d) => d.name === dbName)?.tables?.map((t) => t.name) ?? []\n  const fromCache = tableCache.get(dbName) ?? []\n  return [...new Set([...fromStore, ...fromCache])]\n}\n\nfunction findColumns(dbName: string, tableName: string): Column[] {\n  const key = tableKey(dbName, tableName)\n  const fromCache = columnCache.get(key) ?? []\n  if (fromCache.length > 0) return fromCache\n  const fromStore = getDatabases().find((d) => d.name === dbName)?.tables?.find((t) => t.name === tableName)?.columns ?? 
[]\n  return fromStore\n}\n\nfunction fuzzyScore(text: string, term: string): number {\n  const t = text.toLowerCase()\n  const q = term.toLowerCase().trim()\n  if (!q) return 1\n\n  if (t === q) return 180\n  if (t.startsWith(q)) return 130\n  if (t.includes(`.${q}`)) return 110\n  if (t.includes(` ${q}`)) return 95\n  if (t.includes(q)) return 70\n\n  let qi = 0\n  let seqScore = 0\n  for (let i = 0; i < t.length && qi < q.length; i++) {\n    if (t[i] === q[qi]) {\n      seqScore += i > 0 && /[._\\s]/.test(t[i - 1]) ? 8 : 3\n      qi++\n    }\n  }\n  return qi === q.length ? seqScore : -1\n}\n\nfunction rankCompletions(items: Completion[], term: string): Completion[] {\n  return items\n    .map((item) => {\n      const hay = `${item.label} ${item.detail ?? ''}`\n      const score = fuzzyScore(hay, term)\n      return { item, score: score + (item.boost ?? 0) }\n    })\n    .filter((x) => x.score >= 0)\n    .sort((a, b) => b.score - a.score)\n    .slice(0, 300)\n    .map((x) => x.item)\n}\n\nfunction dedupeCompletions(items: Completion[]): Completion[] {\n  const seen = new Set<string>()\n  const out: Completion[] = []\n  for (const item of items) {\n    const key = `${item.label}|${item.type}|${item.detail ?? 
''}`\n    if (seen.has(key)) continue\n    seen.add(key)\n    out.push(item)\n  }\n  return out\n}\n\nasync function resolveUnqualifiedTableRefs(tableName: string): Promise<TableRef[]> {\n  const refs: TableRef[] = []\n  for (const db of knownDatabases()) {\n    await ensureTablesCached(db)\n    if (findTablesForDatabase(db).includes(tableName)) {\n      refs.push({ db, table: tableName })\n    }\n  }\n  return refs\n}\n\nasync function buildDotCompletions(doc: string, beforeCursor: string): Promise<Completion[]> {\n  const match = beforeCursor.match(/([`\"\\w]+)\\.([`\"\\w]*)$/)\n  if (!match) return []\n\n  const lhs = normalizeIdent(match[1])\n  const aliasMap = buildAliasMap(doc)\n  const aliasTableRef = aliasMap.get(lhs)\n\n  // alias.column -> resolve alias table and columns\n  if (aliasTableRef) {\n    const parsed = parseTableRef(aliasTableRef)\n    if (!parsed) return []\n\n    const refs: TableRef[] = parsed.db\n      ? [parsed]\n      : await resolveUnqualifiedTableRefs(parsed.table)\n\n    const options: Completion[] = []\n    for (const ref of refs) {\n      await ensureColumnsCached(ref.db, ref.table)\n      for (const col of findColumns(ref.db, ref.table)) {\n        options.push({\n          label: col.name,\n          detail: `${col.type} (${ref.db}.${ref.table})`,\n          type: 'property',\n          boost: 22,\n        })\n      }\n    }\n    return options\n  }\n\n  // db.table -> suggest table list\n  const dbNames = knownDatabases()\n  if (dbNames.includes(lhs)) {\n    await ensureTablesCached(lhs)\n    return findTablesForDatabase(lhs).map((t) => ({\n      label: t,\n      detail: lhs,\n      type: 'class',\n      boost: 18,\n    }))\n  }\n\n  // table.column (unqualified table name) -> resolve across dbs\n  const refs = await resolveUnqualifiedTableRefs(lhs)\n  const options: Completion[] = []\n  for (const ref of refs) {\n    await ensureColumnsCached(ref.db, ref.table)\n    for (const col of findColumns(ref.db, ref.table)) {\n      
options.push({\n        label: col.name,\n        detail: `${col.type} (${ref.db}.${ref.table})`,\n        type: 'property',\n        boost: 16,\n      })\n    }\n  }\n  return options\n}\n\nfunction buildDatabaseCompletions(): Completion[] {\n  return knownDatabases().map((db) => ({\n    label: db,\n    type: 'namespace',\n    boost: 5,\n  }))\n}\n\nasync function buildTableCompletions(term: string): Promise<Completion[]> {\n  const options: Completion[] = []\n  const termClean = normalizeIdent(term)\n  const dotIdx = termClean.indexOf('.')\n\n  // If user typed \"db.\" in FROM/JOIN context, prioritize tables of that DB\n  if (dotIdx >= 0) {\n    const dbName = termClean.slice(0, dotIdx)\n    if (dbName) {\n      await ensureTablesCached(dbName)\n      for (const t of findTablesForDatabase(dbName)) {\n        options.push({\n          label: t,\n          detail: dbName,\n          type: 'class',\n          boost: 24,\n        })\n      }\n      return options\n    }\n  }\n\n  for (const dbName of knownDatabases()) {\n    await ensureTablesCached(dbName)\n    for (const table of findTablesForDatabase(dbName)) {\n      options.push({\n        label: `${dbName}.${table}`,\n        type: 'class',\n        boost: 16,\n      })\n      options.push({\n        label: table,\n        detail: dbName,\n        type: 'class',\n        boost: 10,\n      })\n    }\n  }\n\n  return options\n}\n\nasync function buildReferencedColumnCompletions(doc: string): Promise<Completion[]> {\n  const aliasMap = buildAliasMap(doc)\n  const tableRefs = buildReferencedTables(doc)\n  const options: Completion[] = []\n\n  // map tableRef -> aliases for alias-qualified completion\n  const refToAliases = new Map<string, string[]>()\n  for (const [alias, ref] of aliasMap.entries()) {\n    const list = refToAliases.get(ref) ?? 
[]\n    list.push(alias)\n    refToAliases.set(ref, list)\n  }\n\n  for (const refRaw of tableRefs) {\n    const parsed = parseTableRef(refRaw)\n    if (!parsed) continue\n\n    const refs: TableRef[] = parsed.db\n      ? [parsed]\n      : await resolveUnqualifiedTableRefs(parsed.table)\n\n    for (const ref of refs) {\n      await ensureColumnsCached(ref.db, ref.table)\n      const columns = findColumns(ref.db, ref.table)\n      for (const col of columns) {\n        options.push({\n          label: col.name,\n          detail: `${col.type} (${ref.db}.${ref.table})`,\n          type: 'property',\n          boost: 20,\n        })\n\n        // alias.column suggestions for JOIN/ON precision\n        const aliases = refToAliases.get(refRaw) ?? []\n        for (const alias of aliases) {\n          options.push({\n            label: `${alias}.${col.name}`,\n            detail: `${col.type} (${ref.db}.${ref.table})`,\n            type: 'property',\n            boost: 24,\n          })\n        }\n      }\n    }\n  }\n\n  return options\n}\n\nfunction buildFunctionCompletions(): Completion[] {\n  return (cachedFunctions ?? []).map((fn) => ({\n    label: fn,\n    type: 'function',\n    boost: 7,\n    detail: 'Function',\n  }))\n}\n\nfunction buildKeywordCompletions(): Completion[] {\n  return (cachedKeywords ?? 
[]).map((kw) => ({\n    label: kw,\n    type: 'keyword',\n    boost: 4,\n  }))\n}\n\nfunction buildSnippetCompletions(): Completion[] {\n  return [\n    snippetCompletion('\\\\$ref(${model_name})', {\n      label: '$ref()',\n      type: 'snippet',\n      detail: 'Reference another model',\n      boost: 35,\n    }),\n    snippetCompletion('SELECT ${columns}\\nFROM ${database}.${table}\\nLIMIT ${1000}', {\n      label: 'SELECT … FROM',\n      type: 'snippet',\n      detail: 'Query starter',\n      boost: 30,\n    }),\n    snippetCompletion('SELECT ${a}.*, ${b}.*\\nFROM ${table_a} ${a}\\nJOIN ${table_b} ${b} ON ${a}.${id} = ${b}.${id}', {\n      label: 'JOIN Template',\n      type: 'snippet',\n      detail: 'Join two tables',\n      boost: 28,\n    }),\n    snippetCompletion('WITH ${cte_name} AS (\\n  ${query}\\n)\\nSELECT *\\nFROM ${cte_name}', {\n      label: 'CTE Template',\n      type: 'snippet',\n      detail: 'WITH clause',\n      boost: 26,\n    }),\n    snippetCompletion('countIf(${condition})', {\n      label: 'countIf()',\n      type: 'snippet',\n      detail: 'Conditional count',\n      boost: 20,\n    }),\n    snippetCompletion('sumIf(${value}, ${condition})', {\n      label: 'sumIf()',\n      type: 'snippet',\n      detail: 'Conditional sum',\n      boost: 20,\n    }),\n    snippetCompletion('uniqExact(${value})', {\n      label: 'uniqExact()',\n      type: 'snippet',\n      detail: 'Exact cardinality',\n      boost: 18,\n    }),\n    snippetCompletion('toStartOfInterval(${timestamp}, INTERVAL ${5} minute)', {\n      label: 'toStartOfInterval()',\n      type: 'snippet',\n      detail: 'Time bucket',\n      boost: 18,\n    }),\n  ]\n}\n\n// ── Main completion source ──────────────────────────────────────\n\nexport async function clickhouseCompletionSource(\n  context: CompletionContext,\n): Promise<CompletionResult | null> {\n  const word = context.matchBefore(/[\\w.`\"]*/)\n  if (!word) return null\n  if (word.from === word.to && !context.explicit) return 
null\n\n  // Warm caches in background so autocomplete stays responsive even when\n  // metadata endpoints are slow or temporarily unavailable.\n  void ensureFunctionKeywordCache()\n  void ensureDatabasesLoaded()\n\n  const doc = context.state.doc.toString()\n  const beforeCursor = doc.slice(0, context.pos)\n\n  // Check for $ref() context first — return only model names\n  const refCtx = detectRefContext(doc, context.pos)\n  if (refCtx.inside) {\n    await ensureModelsLoaded()\n    return {\n      from: refCtx.nameStart,\n      options: buildModelCompletions(),\n      validFor: /^\\w*$/,\n    }\n  }\n\n  // Detect context at the cursor (not token start) so dot/function contexts are\n  // classified correctly while the user is actively typing.\n  const sqlCtx = detectContext(doc, context.pos)\n  const term = word.text.replace(/[`\"]/g, '')\n\n  let options: Completion[] = []\n\n  switch (sqlCtx) {\n    case 'dot':\n      options = await buildDotCompletions(doc, beforeCursor)\n      break\n    case 'table':\n      options = [\n        ...(await buildTableCompletions(term)),\n        ...buildDatabaseCompletions(),\n        ...buildKeywordCompletions().filter((k) => ['JOIN', 'ON', 'USING', 'WHERE'].includes(k.label)),\n      ]\n      break\n    case 'column':\n      options = [\n        ...(await buildReferencedColumnCompletions(doc)),\n        ...buildFunctionCompletions(),\n        ...buildSnippetCompletions(),\n        ...buildKeywordCompletions(),\n      ]\n      break\n    case 'function':\n      options = [\n        ...buildFunctionCompletions(),\n        ...(await buildReferencedColumnCompletions(doc)),\n      ]\n      break\n    default:\n      options = [\n        ...buildSnippetCompletions(),\n        ...buildKeywordCompletions(),\n        ...buildFunctionCompletions(),\n        ...buildDatabaseCompletions(),\n        ...(await buildTableCompletions(term)),\n        ...(await buildReferencedColumnCompletions(doc)),\n      ]\n      break\n  }\n\n  const 
ranked = rankCompletions(dedupeCompletions(options), term)\n\n  return {\n    from: word.from,\n    options: ranked,\n    validFor: /^[\\w.`\"]*$/,\n  }\n}\n"
  },
  {
    "path": "ui/src/lib/stores/command-palette.svelte.ts",
    "content": "let open = $state(false)\n\nexport function isCommandPaletteOpen(): boolean {\n  return open\n}\n\nexport function openCommandPalette(): void {\n  open = true\n}\n\nexport function closeCommandPalette(): void {\n  open = false\n}\n\nexport function toggleCommandPalette(): void {\n  open = !open\n}\n"
  },
  {
    "path": "ui/src/lib/stores/license.svelte.ts",
    "content": "import type { LicenseInfo } from '../types/api'\nimport { apiGet } from '../api/client'\n\nlet license = $state<LicenseInfo | null>(null)\nlet loading = $state(false)\nlet loadPromise: Promise<void> | null = null\n\nexport function getLicense(): LicenseInfo | null {\n  return license\n}\n\nexport function isLicenseLoading(): boolean {\n  return loading\n}\n\nexport function isProActive(): boolean {\n  return !!(license?.valid && license?.edition?.toLowerCase() === 'pro')\n}\n\nexport async function loadLicense(force = false): Promise<void> {\n  if (!force && license) return\n  if (loadPromise) {\n    await loadPromise\n    return\n  }\n\n  loading = true\n  loadPromise = apiGet<LicenseInfo>('/api/license')\n    .then((res) => {\n      license = res\n    })\n    .catch(() => {\n      license = null\n    })\n    .finally(() => {\n      loading = false\n      loadPromise = null\n    })\n\n  await loadPromise\n}\n"
  },
  {
    "path": "ui/src/lib/stores/number-format.svelte.ts",
    "content": "const STORAGE_KEY = 'ch-ui-format-numbers'\n\nconst initial = localStorage.getItem(STORAGE_KEY) !== 'false'\nlet formatNumbers = $state<boolean>(initial)\n\nexport function getFormatNumbers(): boolean {\n  return formatNumbers\n}\n\nexport function toggleFormatNumbers(): void {\n  formatNumbers = !formatNumbers\n  localStorage.setItem(STORAGE_KEY, String(formatNumbers))\n}\n"
  },
  {
    "path": "ui/src/lib/stores/query-limit.svelte.ts",
    "content": "const STORAGE_KEY = 'ch-ui-max-result-rows'\nconst DEFAULT_LIMIT = 1000\n\nconst stored = parseInt(localStorage.getItem(STORAGE_KEY) ?? '', 10)\nlet maxResultRows = $state<number>(isNaN(stored) || stored < 1 ? DEFAULT_LIMIT : stored)\n\nexport function getMaxResultRows(): number {\n  return maxResultRows\n}\n\nexport function setMaxResultRows(value: number): void {\n  const clamped = Math.max(1, Math.round(value))\n  maxResultRows = clamped\n  localStorage.setItem(STORAGE_KEY, String(clamped))\n}\n"
  },
  {
    "path": "ui/src/lib/stores/router.svelte.ts",
    "content": "import { withBase, stripBase } from '../basePath'\nimport type { SingletonTab } from './tabs.svelte'\nimport { getActiveTab, getTabs, openDashboardTab, openHomeTab, openSingletonTab, setActiveTab } from './tabs.svelte'\n\n// ── URL ↔ Tab mapping ────────────────────────────────────────────\n\nconst TAB_PATHS: Record<string, string> = {\n  'home': '/',\n  'saved-queries': '/saved-queries',\n  'dashboards': '/dashboards',\n  'schedules': '/schedules',\n  'brain': '/brain',\n  'admin': '/admin',\n  'governance': '/governance',\n  'pipelines': '/pipelines',\n  'models': '/models',\n  'model': '/models',\n  'settings': '/license',\n}\n\nconst PATH_TABS: Record<string, { type: SingletonTab['type']; label: string }> = {\n  '/saved-queries': { type: 'saved-queries', label: 'Saved Queries' },\n  '/dashboards': { type: 'dashboards', label: 'Dashboards' },\n  '/schedules': { type: 'schedules', label: 'Schedules' },\n  '/brain': { type: 'brain', label: 'Brain' },\n  '/admin': { type: 'admin', label: 'Admin' },\n  '/governance': { type: 'governance', label: 'Governance' },\n  '/pipelines': { type: 'pipelines', label: 'Pipelines' },\n  '/models': { type: 'models', label: 'Models' },\n  '/settings': { type: 'settings', label: 'License' },\n  '/license': { type: 'settings', label: 'License' },\n}\n\n// Prevents pushState during popstate-triggered tab activation\nlet suppressPush = false\n\n// ── Pipeline sub-route state ─────────────────────────────────────\n\nlet pipelineId = $state<string | undefined>(undefined)\n\nexport function getCurrentPipelineId(): string | undefined {\n  return pipelineId\n}\n\n// ── URL helpers ──────────────────────────────────────────────────\n\nfunction buildUrl(path: string, tabId?: string): string {\n  const fullPath = withBase(path)\n  if (tabId) return `${fullPath}?tab=${tabId}`\n  return fullPath\n}\n\nfunction currentTabParam(): string | null {\n  return new URLSearchParams(window.location.search).get('tab')\n}\n\nfunction 
pushUrl(path: string, tabId?: string): void {\n  const url = buildUrl(path, tabId)\n  const currentPath = window.location.pathname\n\n  if (currentPath !== withBase(path)) {\n    history.pushState(null, '', url)\n  } else if (currentTabParam() !== tabId) {\n    history.replaceState(null, '', url)\n  }\n}\n\n// ── Push helpers ─────────────────────────────────────────────────\n\nexport function pushTabRoute(tabType: string): void {\n  if (suppressPush) return\n  const path = TAB_PATHS[tabType] ?? '/'\n  const activeTab = getActiveTab()\n  pushUrl(path, activeTab?.id)\n}\n\nexport function pushTabRouteForTab(tab: { id: string; type: string; dashboardId?: string }): void {\n  if (suppressPush) return\n  if (tab.type === 'dashboard' && tab.dashboardId) {\n    pushUrl(`/dashboards/${tab.dashboardId}`, tab.id)\n    return\n  }\n  const path = TAB_PATHS[tab.type] ?? '/'\n  pushUrl(path, tab.id)\n}\n\nexport function pushDashboardDetail(id: string): void {\n  if (suppressPush) return\n  const dashTab = getTabs().find(t => t.type === 'dashboard' && 'dashboardId' in t && t.dashboardId === id)\n  pushUrl('/dashboards/' + id, dashTab?.id)\n}\n\nexport function pushDashboardList(): void {\n  if (suppressPush) return\n  const tab = getTabs().find(t => t.type === 'dashboards')\n  pushUrl('/dashboards', tab?.id)\n}\n\nexport function pushPipelineDetail(id: string): void {\n  if (suppressPush) return\n  const tab = getTabs().find(t => t.type === 'pipelines')\n  pushUrl('/pipelines/' + id, tab?.id)\n  pipelineId = id\n}\n\nexport function pushPipelineList(): void {\n  if (suppressPush) return\n  const tab = getTabs().find(t => t.type === 'pipelines')\n  pushUrl('/pipelines', tab?.id)\n  pipelineId = undefined\n}\n\n// ── Parse current URL ───────────────────────────────────────────\n\nexport function parseRoute(): { type: string; dashboardId?: string; pipelineId?: string } {\n  const path = stripBase(window.location.pathname)\n\n  // /dashboards/:id\n  const dashMatch = 
path.match(/^\\/dashboards\\/(.+)$/)\n  if (dashMatch) {\n    return { type: 'dashboard', dashboardId: dashMatch[1] }\n  }\n\n  // /pipelines/:id\n  const pipeMatch = path.match(/^\\/pipelines\\/(.+)$/)\n  if (pipeMatch) {\n    return { type: 'pipelines', pipelineId: pipeMatch[1] }\n  }\n\n  // Known singleton paths\n  const entry = PATH_TABS[path]\n  if (entry) {\n    return { type: entry.type }\n  }\n\n  // Default: home (query editor)\n  return { type: 'home' }\n}\n\n// ── Restore from ?tab= query param ─────────────────────────────\n\nfunction tryRestoreFromTabParam(): boolean {\n  const tabId = currentTabParam()\n  if (!tabId) return false\n  const tab = getTabs().find(t => t.id === tabId)\n  if (!tab) return false\n  suppressPush = true\n  setActiveTab(tabId)\n  suppressPush = false\n  return true\n}\n\nfunction updateSubRouteState(): void {\n  const match = stripBase(window.location.pathname).match(/^\\/pipelines\\/(.+)$/)\n  pipelineId = match?.[1]\n}\n\n// ── Sync URL → tab state ────────────────────────────────────────\n\nfunction syncRouteToTabs(): void {\n  const route = parseRoute()\n\n  // Update pipeline sub-route state\n  pipelineId = route.pipelineId\n\n  if (route.type === 'home') {\n    openHomeTab()\n    return\n  }\n  if (route.type === 'dashboard' && route.dashboardId) {\n    suppressPush = true\n    openDashboardTab(route.dashboardId, 'Dashboard')\n    suppressPush = false\n    return\n  }\n  if (route.type === 'pipelines') {\n    suppressPush = true\n    openSingletonTab('pipelines', 'Pipelines')\n    suppressPush = false\n    return\n  }\n  const entry = PATH_TABS[TAB_PATHS[route.type]]\n  if (entry) {\n    suppressPush = true\n    openSingletonTab(entry.type, entry.label)\n    suppressPush = false\n  }\n}\n\n// ── Initialize ──────────────────────────────────────────────────\n\nlet initialized = false\n\nexport function initRouter(): void {\n  if (initialized) return\n  initialized = true\n\n  // On initial load, try ?tab= param first 
(survives reload reliably)\n  if (!tryRestoreFromTabParam()) {\n    // Fallback: sync from URL pathname\n    syncRouteToTabs()\n  }\n  updateSubRouteState()\n\n  // Seed ?tab= if missing so a subsequent reload works\n  const activeTab = getActiveTab()\n  if (activeTab && !currentTabParam()) {\n    const url = buildUrl(stripBase(window.location.pathname), activeTab.id)\n    history.replaceState(null, '', url)\n  }\n\n  // Handle browser back/forward\n  window.addEventListener('popstate', () => {\n    if (!tryRestoreFromTabParam()) {\n      syncRouteToTabs()\n    }\n    updateSubRouteState()\n  })\n}\n"
  },
  {
    "path": "ui/src/lib/stores/schema.svelte.ts",
    "content": "import type { Database, Table, Column } from '../types/schema'\nimport { apiGet } from '../api/client'\n\nlet databases = $state<Database[]>([])\nlet loading = $state(false)\n\nexport function getDatabases(): Database[] {\n  return databases\n}\n\nexport function isSchemaLoading(): boolean {\n  return loading\n}\n\nexport async function loadDatabases(): Promise<void> {\n  loading = true\n  try {\n    const res = await apiGet<{ databases: string[] }>('/api/query/databases')\n    databases = (res.databases ?? []).map(name => ({ name }))\n  } catch {\n    databases = []\n  } finally {\n    loading = false\n  }\n}\n\nexport async function loadTables(dbName: string): Promise<void> {\n  databases = databases.map(db => {\n    if (db.name !== dbName) return db\n    return { ...db, loading: true, expanded: true }\n  })\n\n  try {\n    const res = await apiGet<{ tables: Array<{ name: string; engine: string }> }>(`/api/query/tables?database=${encodeURIComponent(dbName)}`)\n    const tables: Table[] = (res.tables ?? []).map(t => ({ name: t.name, engine: t.engine }))\n    databases = databases.map(db => {\n      if (db.name !== dbName) return db\n      return { ...db, tables, loading: false }\n    })\n  } catch {\n    databases = databases.map(db => {\n      if (db.name !== dbName) return db\n      return { ...db, loading: false }\n    })\n  }\n}\n\nexport async function loadColumns(dbName: string, tableName: string): Promise<void> {\n  databases = databases.map(db => {\n    if (db.name !== dbName) return db\n    return {\n      ...db,\n      tables: db.tables?.map(t => {\n        if (t.name !== tableName) return t\n        return { ...t, loading: true, expanded: true }\n      }),\n    }\n  })\n\n  try {\n    const res = await apiGet<{ columns: Column[] }>(`/api/query/columns?database=${encodeURIComponent(dbName)}&table=${encodeURIComponent(tableName)}`)\n    const columns: Column[] = res.columns ?? 
[]\n    databases = databases.map(db => {\n      if (db.name !== dbName) return db\n      return {\n        ...db,\n        tables: db.tables?.map(t => {\n          if (t.name !== tableName) return t\n          return { ...t, columns, loading: false }\n        }),\n      }\n    })\n  } catch {\n    databases = databases.map(db => {\n      if (db.name !== dbName) return db\n      return {\n        ...db,\n        tables: db.tables?.map(t => {\n          if (t.name !== tableName) return t\n          return { ...t, loading: false }\n        }),\n      }\n    })\n  }\n}\n\nexport function toggleDatabase(dbName: string): void {\n  const db = databases.find(d => d.name === dbName)\n  if (!db) return\n  if (db.expanded) {\n    databases = databases.map(d => d.name === dbName ? { ...d, expanded: false } : d)\n  } else {\n    loadTables(dbName)\n  }\n}\n\nexport function toggleTable(dbName: string, tableName: string): void {\n  const db = databases.find(d => d.name === dbName)\n  const table = db?.tables?.find(t => t.name === tableName)\n  if (!table) return\n  if (table.expanded) {\n    databases = databases.map(d => {\n      if (d.name !== dbName) return d\n      return {\n        ...d,\n        tables: d.tables?.map(t => t.name === tableName ? { ...t, expanded: false } : t),\n      }\n    })\n  } else {\n    loadColumns(dbName, tableName)\n  }\n}\n"
  },
  {
    "path": "ui/src/lib/stores/session.svelte.ts",
    "content": "import type { Session } from '../types/api'\nimport { checkSession, login as apiLogin, logout as apiLogout } from '../api/auth'\n\nlet session = $state<Session | null>(null)\nlet loading = $state(true)\nlet error = $state<string | null>(null)\n\nexport function getSession(): Session | null {\n  return session\n}\n\nexport function isLoading(): boolean {\n  return loading\n}\n\nexport function getError(): string | null {\n  return error\n}\n\nexport function isAuthenticated(): boolean {\n  return session !== null\n}\n\n/** Initialize session from server cookie */\nexport async function initSession(): Promise<void> {\n  loading = true\n  error = null\n  try {\n    session = await checkSession()\n  } catch (e) {\n    session = null\n  } finally {\n    loading = false\n  }\n}\n\n/** Log in and set session */\nexport async function login(connectionId: string, username: string, password: string): Promise<void> {\n  error = null\n  loading = true\n  try {\n    const res = await apiLogin({ connectionId, username, password })\n    session = res.session\n  } catch (e: any) {\n    error = e.message || 'Login failed'\n    throw e\n  } finally {\n    loading = false\n  }\n}\n\n/** Log out and clear session */\nexport async function logout(): Promise<void> {\n  try {\n    await apiLogout()\n  } finally {\n    session = null\n  }\n}\n"
  },
  {
    "path": "ui/src/lib/stores/tabs.svelte.ts",
    "content": "import type { ColumnMeta, QueryStats } from '../types/query'\nimport type { ModelEditState } from '../types/models'\nimport { createUUID } from '../utils/uuid'\nimport { pushTabRouteForTab } from './router.svelte'\n\n// ── Tab types ────────────────────────────────────────────────────\n\nexport type TabType = 'home' | 'query' | 'table' | 'database' | 'dashboard' | 'model' | 'saved-queries' | 'settings' | 'dashboards' | 'schedules' | 'brain' | 'admin' | 'governance' | 'pipelines' | 'models'\n\ninterface TabBase {\n  id: string\n  type: TabType\n  name: string\n}\n\nexport interface QueryTab extends TabBase {\n  type: 'query'\n  sql: string\n  dirty: boolean\n  savedQueryId?: string\n  baseSql?: string\n}\n\nexport interface TableTab extends TabBase {\n  type: 'table'\n  database: string\n  table: string\n}\n\nexport interface DatabaseTab extends TabBase {\n  type: 'database'\n  database: string\n}\n\nexport interface DashboardTab extends TabBase {\n  type: 'dashboard'\n  dashboardId: string\n}\n\nexport interface ModelTab extends TabBase {\n  type: 'model'\n  modelId: string\n  dirty: boolean\n  edit: ModelEditState\n  base: ModelEditState\n  status: string\n  lastError: string | null\n}\n\nexport interface HomeTab extends TabBase {\n  type: 'home'\n}\n\nexport interface SingletonTab extends TabBase {\n  type: 'saved-queries' | 'settings' | 'dashboards' | 'schedules' | 'brain' | 'admin' | 'governance' | 'pipelines' | 'models'\n}\n\nexport type Tab = HomeTab | QueryTab | TableTab | DatabaseTab | DashboardTab | ModelTab | SingletonTab\n\n// ── Tab Groups (split view) ─────────────────────────────────────\n\nexport interface TabGroup {\n  id: string        // 'left' | 'right'\n  tabIds: string[]  // ordered tab IDs in this group\n  activeTabId: string\n}\n\n// ── Per-tab query results ────────────────────────────────────────\n\nexport interface TabResult {\n  meta: ColumnMeta[]\n  data: unknown[][]\n  stats: QueryStats | null\n  elapsedMs: number\n  
error: string | null\n  running: boolean\n}\n\n// ── Persistence ─────────────────────────────────────────────────\n\nconst STORAGE_KEY = 'ch-ui-tabs'\nconst HOME_TAB_ID = 'home'\nconst HOME_TAB_NAME = 'Home'\n\ninterface StorageFormat {\n  tabs: Tab[]\n  groups: TabGroup[]\n  focusedGroupId: string\n}\n\nfunction saveTabs(): void {\n  try {\n    localStorage.setItem(STORAGE_KEY, JSON.stringify({\n      tabs,\n      groups,\n      focusedGroupId,\n    }))\n  } catch {\n    // localStorage full or unavailable\n  }\n}\n\nfunction loadTabs(): StorageFormat {\n  try {\n    const raw = localStorage.getItem(STORAGE_KEY)\n    if (raw) {\n      const parsed = JSON.parse(raw)\n      if (Array.isArray(parsed.tabs) && parsed.tabs.length > 0) {\n        // Derive nextNum from existing query tab names\n        for (const t of parsed.tabs) {\n          if (t.type === 'query') {\n            const match = t.name.match(/^Query (\\d+)$/)\n            if (match) {\n              const n = parseInt(match[1], 10)\n              if (n >= nextNum) nextNum = n + 1\n            }\n          }\n        }\n        // New format: has groups array\n        if (Array.isArray(parsed.groups) && parsed.groups.length > 0) {\n          return normalizeTabsState({\n            tabs: parsed.tabs,\n            groups: parsed.groups,\n            focusedGroupId: parsed.focusedGroupId || 'left',\n          })\n        }\n        // Legacy migration: old format had { tabs, activeTabId }\n        const activeId = parsed.activeTabId || parsed.tabs[0].id\n        return normalizeTabsState({\n          tabs: parsed.tabs,\n          groups: [{ id: 'left', tabIds: parsed.tabs.map((t: Tab) => t.id), activeTabId: activeId }],\n          focusedGroupId: 'left',\n        })\n      }\n    }\n  } catch {\n    // corrupt data\n  }\n  const homeTab = createHomeTab()\n  return normalizeTabsState({\n    tabs: [homeTab],\n    groups: [{ id: 'left', tabIds: [homeTab.id], activeTabId: homeTab.id }],\n    focusedGroupId: 
'left',\n  })\n}\n\n// ── State ────────────────────────────────────────────────────────\n\nlet nextNum = 1\n\ninterface CreateQueryTabOptions {\n  name?: string\n  savedQueryId?: string\n  baseSql?: string\n}\n\nfunction createQueryTab(sql = '', options: CreateQueryTabOptions = {}): QueryTab {\n  const id = createUUID()\n  const name = options.name?.trim() ? options.name.trim() : `Query ${nextNum++}`\n  return {\n    id,\n    type: 'query',\n    name,\n    sql,\n    dirty: false,\n    savedQueryId: options.savedQueryId,\n    baseSql: options.baseSql ?? sql,\n  }\n}\n\nfunction createHomeTab(): HomeTab {\n  return {\n    id: HOME_TAB_ID,\n    type: 'home',\n    name: HOME_TAB_NAME,\n  }\n}\n\nfunction normalizeTabsState(state: StorageFormat): StorageFormat {\n  const existingHome = state.tabs.find((tab) => tab.type === 'home') as HomeTab | undefined\n  const homeTab = existingHome ?? createHomeTab()\n\n  const nonHomeTabs = state.tabs.filter((tab) => tab.type !== 'home' && tab.id !== HOME_TAB_ID)\n  const normalizedTabs: Tab[] = [homeTab, ...nonHomeTabs]\n  const tabIdSet = new Set(normalizedTabs.map((tab) => tab.id))\n\n  const incomingGroups = state.groups.length > 0\n    ? 
state.groups.map((group) => ({ ...group }))\n    : [{ id: 'left', tabIds: [], activeTabId: homeTab.id }]\n\n  if (!incomingGroups.some((group) => group.id === 'left')) {\n    incomingGroups[0] = { ...incomingGroups[0], id: 'left' }\n  }\n  if (incomingGroups.length > 2) {\n    incomingGroups.splice(2)\n  }\n  if (incomingGroups.length === 2) {\n    incomingGroups[1] = { ...incomingGroups[1], id: 'right' }\n  }\n\n  const normalizedGroups = incomingGroups.map((group) => {\n    const seen = new Set<string>()\n    const ids = group.tabIds.filter((tabId) => {\n      if (!tabIdSet.has(tabId) || tabId === homeTab.id || seen.has(tabId)) return false\n      seen.add(tabId)\n      return true\n    })\n    return { ...group, tabIds: ids }\n  })\n\n  const leftGroup = normalizedGroups.find((group) => group.id === 'left') ?? normalizedGroups[0]\n  if (!leftGroup) {\n    normalizedGroups.push({ id: 'left', tabIds: [homeTab.id], activeTabId: homeTab.id })\n  } else {\n    leftGroup.tabIds = [homeTab.id, ...leftGroup.tabIds]\n  }\n\n  const assignedTabIds = new Set(normalizedGroups.flatMap((group) => group.tabIds))\n  for (const tab of nonHomeTabs) {\n    if (!assignedTabIds.has(tab.id)) {\n      const target = normalizedGroups.find((group) => group.id === 'left') ?? normalizedGroups[0]\n      target.tabIds.push(tab.id)\n      assignedTabIds.add(tab.id)\n    }\n  }\n\n  const hydratedGroups = normalizedGroups\n    .filter((group) => group.id === 'left' || group.tabIds.length > 0)\n    .map((group) => {\n      const activeTabId = group.tabIds.includes(group.activeTabId)\n        ? group.activeTabId\n        : (group.tabIds[0] ?? homeTab.id)\n      return { ...group, activeTabId }\n    })\n\n  const groups = hydratedGroups.length > 0\n    ? hydratedGroups\n    : [{ id: 'left', tabIds: [homeTab.id], activeTabId: homeTab.id }]\n  const focusedGroupId = groups.some((group) => group.id === state.focusedGroupId)\n    ? 
state.focusedGroupId\n    : 'left'\n\n  return {\n    tabs: normalizedTabs,\n    groups,\n    focusedGroupId,\n  }\n}\n\nconst initial = loadTabs()\n\nlet tabs = $state<Tab[]>(initial.tabs.map((tab) => {\n  if (tab.type !== 'query') return tab\n  const queryTab = tab as QueryTab\n  return {\n    ...queryTab,\n    baseSql: typeof queryTab.baseSql === 'string'\n      ? queryTab.baseSql\n      : (queryTab.dirty ? '' : queryTab.sql),\n  }\n}))\nlet groups = $state<TabGroup[]>(initial.groups)\nlet focusedGroupId = $state<string>(initial.focusedGroupId)\nlet results = $state<Map<string, TabResult>>(new Map())\n\n// Auto-save on any change (debounced via microtask)\nlet saveQueued = false\nfunction queueSave(): void {\n  if (saveQueued) return\n  saveQueued = true\n  queueMicrotask(() => {\n    saveTabs()\n    saveQueued = false\n  })\n}\n\n// ── Internal helpers ────────────────────────────────────────────\n\nfunction findGroupForTab(tabId: string): string | undefined {\n  return groups.find(g => g.tabIds.includes(tabId))?.id\n}\n\nfunction isHomeTabId(tabId: string): boolean {\n  const tab = tabs.find((entry) => entry.id === tabId)\n  return !!tab && tab.type === 'home'\n}\n\nfunction resolveTargetGroupId(targetGroupId?: string): string {\n  const candidate = targetGroupId ?? focusedGroupId\n  if (groups.some((group) => group.id === candidate)) return candidate\n  return groups[0]?.id ?? 'left'\n}\n\n// ── Getters ─────────────────────────────────────────────────────\n\nexport function getTabs(): Tab[] {\n  return tabs\n}\n\n/** Backward-compat: returns focused group's active tab ID */\nexport function getActiveTabId(): string {\n  const group = groups.find(g => g.id === focusedGroupId) ?? groups[0]\n  return group?.activeTabId ?? 
''\n}\n\n/** Backward-compat: returns focused group's active tab */\nexport function getActiveTab(): Tab | undefined {\n  return tabs.find(t => t.id === getActiveTabId())\n}\n\n// ── Group getters ───────────────────────────────────────────────\n\nexport function getGroups(): TabGroup[] {\n  return groups\n}\n\nexport function getFocusedGroupId(): string {\n  return focusedGroupId\n}\n\nexport function isSplit(): boolean {\n  return groups.length === 2\n}\n\nexport function getGroupTabs(groupId: string): Tab[] {\n  const group = groups.find(g => g.id === groupId)\n  if (!group) return []\n  return group.tabIds.map(id => tabs.find(t => t.id === id)).filter(Boolean) as Tab[]\n}\n\nexport function getGroupActiveTab(groupId: string): Tab | undefined {\n  const group = groups.find(g => g.id === groupId)\n  if (!group) return undefined\n  return tabs.find(t => t.id === group.activeTabId)\n}\n\nexport function getGroupActiveTabId(groupId: string): string {\n  const group = groups.find(g => g.id === groupId)\n  return group?.activeTabId ?? ''\n}\n\n// ── Tab result accessors ─────────────────────────────────────────\n\nexport function getTabResult(tabId: string): TabResult | undefined {\n  return results.get(tabId)\n}\n\nexport function setTabResult(tabId: string, partial: Partial<TabResult>): void {\n  const current = results.get(tabId) ?? {\n    meta: [],\n    data: [],\n    stats: null,\n    elapsedMs: 0,\n    error: null,\n    running: false,\n  }\n  const updated = new Map(results)\n  updated.set(tabId, { ...current, ...partial })\n  results = updated\n}\n\nexport function clearTabResult(tabId: string): void {\n  const updated = new Map(results)\n  updated.delete(tabId)\n  results = updated\n}\n\n// ── Actions ─────────────────────────────────────────────────────\n\nexport function setActiveTab(id: string, groupId?: string): void {\n  const gid = groupId ?? findGroupForTab(id) ?? focusedGroupId\n  groups = groups.map(g =>\n    g.id === gid ? 
{ ...g, activeTabId: id } : g\n  )\n  focusedGroupId = gid\n  // Sync URL to match the activated tab\n  const tab = tabs.find(t => t.id === id)\n  if (tab) pushTabRouteForTab(tab)\n  queueSave()\n}\n\nexport function setFocusedGroup(groupId: string): void {\n  focusedGroupId = groupId\n}\n\n// ── Open tabs (with deduplication) ───────────────────────────────\n\nexport function openHomeTab(): void {\n  const homeTab = tabs.find((tab) => tab.type === 'home') as HomeTab | undefined\n  if (homeTab) {\n    setActiveTab(homeTab.id, 'left')\n    return\n  }\n\n  const tab = createHomeTab()\n  tabs = [tab, ...tabs]\n  const leftGroup = groups.find((group) => group.id === 'left')\n  if (leftGroup) {\n    groups = groups.map((group) =>\n      group.id === 'left'\n        ? { ...group, tabIds: [tab.id, ...group.tabIds], activeTabId: tab.id }\n        : group,\n    )\n  } else {\n    groups = [{ id: 'left', tabIds: [tab.id], activeTabId: tab.id }, ...groups]\n  }\n  focusedGroupId = 'left'\n  pushTabRouteForTab(tab)\n  queueSave()\n}\n\nexport function openQueryTab(sql = '', targetGroupId?: string): void {\n  const tab = createQueryTab(sql)\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? 
{ ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g\n  )\n  focusedGroupId = gid\n  queueSave()\n}\n\ninterface SavedQueryTabInput {\n  id: string\n  name: string\n  query: string\n}\n\nexport function openSavedQueryTab(savedQuery: SavedQueryTabInput, targetGroupId?: string): void {\n  const existing = tabs.find(\n    (tab) => tab.type === 'query' && (tab as QueryTab).savedQueryId === savedQuery.id,\n  ) as QueryTab | undefined\n  if (existing) {\n    tabs = tabs.map((tab) => {\n      if (tab.id !== existing.id || tab.type !== 'query') return tab\n      if (tab.dirty) return tab\n      return {\n        ...tab,\n        name: savedQuery.name,\n        sql: savedQuery.query,\n        baseSql: savedQuery.query,\n        dirty: false,\n      }\n    })\n    setActiveTab(existing.id)\n    return\n  }\n\n  const tab = createQueryTab(savedQuery.query, {\n    name: savedQuery.name,\n    savedQueryId: savedQuery.id,\n    baseSql: savedQuery.query,\n  })\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? { ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g,\n  )\n  focusedGroupId = gid\n  queueSave()\n}\n\nexport function openTableTab(database: string, table: string, targetGroupId?: string): void {\n  const existing = tabs.find(\n    t => t.type === 'table' && t.database === database && t.table === table\n  ) as TableTab | undefined\n  if (existing) {\n    setActiveTab(existing.id)\n    return\n  }\n  const tab: TableTab = {\n    id: createUUID(),\n    type: 'table',\n    name: `${database}.${table}`,\n    database,\n    table,\n  }\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? 
{ ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g\n  )\n  focusedGroupId = gid\n  queueSave()\n}\n\nexport function openDatabaseTab(database: string, targetGroupId?: string): void {\n  const existing = tabs.find(\n    t => t.type === 'database' && t.database === database,\n  ) as DatabaseTab | undefined\n  if (existing) {\n    setActiveTab(existing.id)\n    return\n  }\n\n  const tab: DatabaseTab = {\n    id: createUUID(),\n    type: 'database',\n    name: database,\n    database,\n  }\n\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? { ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g,\n  )\n  focusedGroupId = gid\n  queueSave()\n}\n\nexport function openDashboardTab(dashboardId: string, name = 'Dashboard', targetGroupId?: string): void {\n  const existing = tabs.find(\n    t => t.type === 'dashboard' && t.dashboardId === dashboardId,\n  ) as DashboardTab | undefined\n  if (existing) {\n    setActiveTab(existing.id)\n    return\n  }\n\n  const tab: DashboardTab = {\n    id: createUUID(),\n    type: 'dashboard',\n    name,\n    dashboardId,\n  }\n\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? { ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g,\n  )\n  focusedGroupId = gid\n  pushTabRouteForTab(tab)\n  queueSave()\n}\n\nexport function openSingletonTab(type: SingletonTab['type'], name: string, targetGroupId?: string): void {\n  const existing = tabs.find(t => t.type === type)\n  if (existing) {\n    if (existing.name !== name) {\n      tabs = tabs.map((tab) => (tab.id === existing.id ? 
{ ...tab, name } : tab))\n      queueSave()\n    }\n    setActiveTab(existing.id)\n    return\n  }\n  const tab: SingletonTab = {\n    id: createUUID(),\n    type,\n    name,\n  }\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? { ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g\n  )\n  focusedGroupId = gid\n  pushTabRouteForTab(tab)\n  queueSave()\n}\n\n// ── Close / update ───────────────────────────────────────────────\n\nexport function closeTab(id: string): void {\n  if (isHomeTabId(id)) return\n\n  const groupId = findGroupForTab(id)\n  if (!groupId) return\n\n  // Remove from group\n  groups = groups.map(g => {\n    if (g.id !== groupId) return g\n    const newTabIds = g.tabIds.filter(tid => tid !== id)\n    let newActiveId = g.activeTabId\n    if (g.activeTabId === id) {\n      const idx = g.tabIds.indexOf(id)\n      const newIdx = Math.min(idx, newTabIds.length - 1)\n      newActiveId = newTabIds[newIdx] ?? 
''\n    }\n    return { ...g, tabIds: newTabIds, activeTabId: newActiveId }\n  })\n\n  // Remove tab data\n  tabs = tabs.filter(t => t.id !== id)\n  clearTabResult(id)\n\n  // Collapse empty group\n  const emptyGroup = groups.find(g => g.tabIds.length === 0)\n  if (emptyGroup) {\n    groups = groups.filter(g => g.tabIds.length > 0)\n    if (groups.length === 0) {\n      // Keep the pinned Home tab alive.\n      const homeTab = createHomeTab()\n      tabs = [homeTab]\n      groups = [{ id: 'left', tabIds: [homeTab.id], activeTabId: homeTab.id }]\n    }\n    focusedGroupId = groups[0].id\n  }\n\n  // If all tabs gone from last group, ensure Home exists.\n  if (tabs.length === 0) {\n    const homeTab = createHomeTab()\n    tabs = [homeTab]\n    groups = [{ id: 'left', tabIds: [homeTab.id], activeTabId: homeTab.id }]\n    focusedGroupId = 'left'\n  }\n\n  const normalized = normalizeTabsState({ tabs, groups, focusedGroupId })\n  tabs = normalized.tabs\n  groups = normalized.groups\n  focusedGroupId = normalized.focusedGroupId\n\n  queueSave()\n}\n\nexport function updateTabSQL(id: string, sql: string): void {\n  tabs = tabs.map((tab) => {\n    if (tab.id !== id || tab.type !== 'query') return tab\n    const baseSql = typeof tab.baseSql === 'string' ? tab.baseSql : ''\n    return { ...tab, sql, dirty: sql !== baseSql }\n  })\n  queueSave()\n}\n\nexport function renameTab(id: string, name: string): void {\n  if (isHomeTabId(id)) return\n  tabs = tabs.map(t => (t.id === id ? { ...t, name } : t))\n  queueSave()\n}\n\nexport function markQueryTabSaved(id: string, options: { savedQueryId?: string; name?: string; baseSql?: string } = {}): void {\n  tabs = tabs.map((tab) => {\n    if (tab.id !== id || tab.type !== 'query') return tab\n    const name = options.name?.trim() ? options.name.trim() : tab.name\n    const baseSql = options.baseSql ?? tab.sql\n    return {\n      ...tab,\n      name,\n      savedQueryId: options.savedQueryId ?? 
tab.savedQueryId,\n      baseSql,\n      dirty: false,\n    }\n  })\n  queueSave()\n}\n\nexport function isTabDirty(id: string): boolean {\n  const tab = tabs.find((entry) => entry.id === id)\n  if (!tab) return false\n  if (tab.type === 'query') return !!(tab as QueryTab).dirty\n  if (tab.type === 'model') return !!(tab as ModelTab).dirty\n  return false\n}\n\n// ── Reorder (within a group) ─────────────────────────────────────\n\nexport function reorderTab(groupId: string, fromIndex: number, toIndex: number): void {\n  if (fromIndex === toIndex) return\n  groups = groups.map(g => {\n    if (g.id !== groupId) return g\n    const updated = [...g.tabIds]\n    const [moved] = updated.splice(fromIndex, 1)\n    if (!moved || isHomeTabId(moved)) return g\n    updated.splice(toIndex, 0, moved)\n    if (g.id === 'left') {\n      const homeIndex = updated.findIndex((tabId) => isHomeTabId(tabId))\n      if (homeIndex > 0) {\n        const [homeId] = updated.splice(homeIndex, 1)\n        updated.unshift(homeId)\n      }\n    }\n    return { ...g, tabIds: updated }\n  })\n  queueSave()\n}\n\n// ── Split / move / unsplit ───────────────────────────────────────\n\nexport function splitTabToSide(tabId: string, side: 'left' | 'right'): void {\n  if (isHomeTabId(tabId)) return\n\n  if (groups.length >= 2) {\n    moveTabToGroup(tabId, side)\n    return\n  }\n\n  const sourceGroup = groups.find(g => g.tabIds.includes(tabId))\n  if (!sourceGroup || sourceGroup.tabIds.length <= 1) return\n\n  const remainingTabIds = sourceGroup.tabIds.filter(id => id !== tabId)\n  const idx = sourceGroup.tabIds.indexOf(tabId)\n  const remainingActive = sourceGroup.activeTabId === tabId\n    ? remainingTabIds[Math.min(idx, remainingTabIds.length - 1)] ?? remainingTabIds[0] ?? 
''\n    : sourceGroup.activeTabId\n\n  if (side === 'right') {\n    groups = [\n      { id: 'left', tabIds: remainingTabIds, activeTabId: remainingActive },\n      { id: 'right', tabIds: [tabId], activeTabId: tabId },\n    ]\n    focusedGroupId = 'right'\n  } else {\n    groups = [\n      { id: 'left', tabIds: [tabId], activeTabId: tabId },\n      { id: 'right', tabIds: remainingTabIds, activeTabId: remainingActive },\n    ]\n    focusedGroupId = 'left'\n  }\n\n  const normalized = normalizeTabsState({ tabs, groups, focusedGroupId })\n  tabs = normalized.tabs\n  groups = normalized.groups\n  focusedGroupId = normalized.focusedGroupId\n\n  queueSave()\n}\n\nexport function splitTab(tabId: string): void {\n  if (isHomeTabId(tabId)) return\n\n  if (groups.length >= 2) {\n    // Already split — move to other group\n    const sourceGroupId = findGroupForTab(tabId)\n    const targetGroupId = sourceGroupId === 'left' ? 'right' : 'left'\n    moveTabToGroup(tabId, targetGroupId)\n    return\n  }\n  splitTabToSide(tabId, 'right')\n}\n\nexport function moveTabToGroup(tabId: string, targetGroupId: string): void {\n  if (isHomeTabId(tabId)) return\n\n  const sourceGroupId = findGroupForTab(tabId)\n  if (!sourceGroupId || sourceGroupId === targetGroupId) return\n\n  // If target group doesn't exist yet, create it (this enables cross-group drag to create split)\n  if (!groups.find(g => g.id === targetGroupId)) {\n    const sourceGroup = groups.find(g => g.id === sourceGroupId)\n    if (!sourceGroup || sourceGroup.tabIds.length <= 1) return\n    // Create split\n    splitTab(tabId)\n    return\n  }\n\n  groups = groups.map(g => {\n    if (g.id === sourceGroupId) {\n      const newTabIds = g.tabIds.filter(id => id !== tabId)\n      const idx = g.tabIds.indexOf(tabId)\n      const newActive = g.activeTabId === tabId\n        ? (newTabIds[Math.min(idx, newTabIds.length - 1)] ?? newTabIds[0] ?? 
'')\n        : g.activeTabId\n      return { ...g, tabIds: newTabIds, activeTabId: newActive }\n    }\n    if (g.id === targetGroupId) {\n      return { ...g, tabIds: [...g.tabIds, tabId], activeTabId: tabId }\n    }\n    return g\n  })\n\n  // Collapse empty groups\n  const emptyGroup = groups.find(g => g.tabIds.length === 0)\n  if (emptyGroup) {\n    groups = groups.filter(g => g.tabIds.length > 0)\n    focusedGroupId = groups[0]?.id ?? 'left'\n  } else {\n    focusedGroupId = targetGroupId\n  }\n\n  const normalized = normalizeTabsState({ tabs, groups, focusedGroupId })\n  tabs = normalized.tabs\n  groups = normalized.groups\n  focusedGroupId = normalized.focusedGroupId\n\n  queueSave()\n}\n\nexport function unsplit(): void {\n  const allTabIds = groups.flatMap(g => g.tabIds)\n  const homeId = tabs.find((tab) => tab.type === 'home')?.id ?? HOME_TAB_ID\n  const ordered = [homeId, ...allTabIds.filter((id) => id !== homeId)]\n  const preferredActive = groups.find(g => g.id === focusedGroupId)?.activeTabId ?? ordered[0]\n  const activeId = preferredActive === homeId ? homeId : (ordered.includes(preferredActive) ? 
preferredActive : homeId)\n  groups = [{ id: 'left', tabIds: ordered, activeTabId: activeId }]\n  focusedGroupId = 'left'\n  queueSave()\n}\n\n// ── Model tabs ──────────────────────────────────────────────────\n\nfunction modelEditEqual(a: ModelEditState, b: ModelEditState): boolean {\n  return a.modelName === b.modelName &&\n    a.description === b.description &&\n    a.targetDatabase === b.targetDatabase &&\n    a.materialization === b.materialization &&\n    a.sqlBody === b.sqlBody &&\n    a.tableEngine === b.tableEngine &&\n    a.orderBy === b.orderBy\n}\n\ninterface ModelTabInput {\n  id: string\n  name: string\n  description: string\n  target_database: string\n  materialization: string\n  sql_body: string\n  table_engine: string\n  order_by: string\n  status: string\n  last_error: string | null\n}\n\nexport function openModelTab(model: ModelTabInput, targetGroupId?: string): void {\n  const existing = tabs.find(t => t.type === 'model' && (t as ModelTab).modelId === model.id) as ModelTab | undefined\n  if (existing) {\n    setActiveTab(existing.id)\n    return\n  }\n\n  const editState: ModelEditState = {\n    modelName: model.name,\n    description: model.description,\n    targetDatabase: model.target_database,\n    materialization: model.materialization,\n    sqlBody: model.sql_body,\n    tableEngine: model.table_engine,\n    orderBy: model.order_by,\n  }\n\n  const tab: ModelTab = {\n    id: createUUID(),\n    type: 'model',\n    name: model.name,\n    modelId: model.id,\n    dirty: false,\n    edit: { ...editState },\n    base: { ...editState },\n    status: model.status,\n    lastError: model.last_error ?? null,\n  }\n\n  tabs = [...tabs, tab]\n  const gid = resolveTargetGroupId(targetGroupId)\n  groups = groups.map(g =>\n    g.id === gid ? 
{ ...g, tabIds: [...g.tabIds, tab.id], activeTabId: tab.id } : g\n  )\n  focusedGroupId = gid\n  pushTabRouteForTab(tab)\n  queueSave()\n}\n\nexport function updateModelTabEdit(tabId: string, partial: Partial<ModelEditState>): void {\n  tabs = tabs.map(tab => {\n    if (tab.id !== tabId || tab.type !== 'model') return tab\n    const modelTab = tab as ModelTab\n    const edit = { ...modelTab.edit, ...partial }\n    const dirty = !modelEditEqual(edit, modelTab.base)\n    const name = edit.modelName || modelTab.name\n    return { ...modelTab, edit, dirty, name }\n  })\n  queueSave()\n}\n\nexport function markModelTabSaved(tabId: string, model: { name: string; status: string; last_error: string | null }): void {\n  tabs = tabs.map(tab => {\n    if (tab.id !== tabId || tab.type !== 'model') return tab\n    const modelTab = tab as ModelTab\n    return { ...modelTab, base: { ...modelTab.edit }, dirty: false, name: model.name, status: model.status, lastError: model.last_error }\n  })\n  queueSave()\n}\n\nexport function updateModelTabStatus(tabId: string, status: string, lastError: string | null): void {\n  tabs = tabs.map(tab => {\n    if (tab.id !== tabId || tab.type !== 'model') return tab\n    return { ...tab, status, lastError }\n  })\n  queueSave()\n}\n\n// Legacy alias\nexport const addTab = openQueryTab\n"
  },
  {
    "path": "ui/src/lib/stores/theme.svelte.ts",
    "content": "type Theme = 'dark' | 'light'\n\nconst initial = (localStorage.getItem('ch-ui-theme') as Theme) || 'dark'\nlet theme = $state<Theme>(initial)\n\napplyTheme(initial)\n\nexport function getTheme(): Theme {\n  return theme\n}\n\nexport function toggleTheme(): void {\n  theme = theme === 'dark' ? 'light' : 'dark'\n  localStorage.setItem('ch-ui-theme', theme)\n  applyTheme(theme)\n}\n\nfunction applyTheme(t: Theme): void {\n  if (t === 'dark') {\n    document.documentElement.classList.add('dark')\n  } else {\n    document.documentElement.classList.remove('dark')\n  }\n}\n"
  },
  {
    "path": "ui/src/lib/stores/toast.svelte.ts",
    "content": "import { toast } from 'svelte-sonner'\nimport type { ExternalToast } from 'svelte-sonner'\n\nexport type ToastType = 'info' | 'success' | 'error' | 'warning'\n\nconst DEFAULT_DURATION: Record<ToastType, number> = {\n  success: 3600,\n  info: 4400,\n  warning: 5600,\n  error: 7000,\n}\n\nfunction normalizeMessage(message: string): string {\n  return (message ?? '').trim()\n}\n\nfunction resolveToastOptions(\n  type: ToastType,\n  optionsOrDuration?: number | ExternalToast,\n): ExternalToast {\n  if (typeof optionsOrDuration === 'number') {\n    return { duration: Math.max(0, optionsOrDuration) }\n  }\n  return {\n    duration: optionsOrDuration?.duration ?? DEFAULT_DURATION[type],\n    ...optionsOrDuration,\n  }\n}\n\nexport function addToast(message: string, type: ToastType = 'info', duration?: number): void {\n  switch (type) {\n    case 'success':\n      success(message, duration)\n      return\n    case 'error':\n      error(message, duration)\n      return\n    case 'warning':\n      warning(message, duration)\n      return\n    case 'info':\n    default:\n      info(message, duration)\n      return\n  }\n}\n\nexport function removeToast(id: number | string): void {\n  toast.dismiss(id)\n}\n\nexport function dismiss(id?: number | string): void {\n  toast.dismiss(id)\n}\n\nexport function getToasts() {\n  return toast.getActiveToasts()\n}\n\nexport function success(message: string, optionsOrDuration?: number | ExternalToast): void {\n  const cleanMessage = normalizeMessage(message)\n  if (!cleanMessage) return\n  toast.success(cleanMessage, resolveToastOptions('success', optionsOrDuration))\n}\n\nexport function error(message: string, optionsOrDuration?: number | ExternalToast): void {\n  const cleanMessage = normalizeMessage(message)\n  if (!cleanMessage) return\n  toast.error(cleanMessage, resolveToastOptions('error', optionsOrDuration))\n}\n\nexport function warning(message: string, optionsOrDuration?: number | ExternalToast): void {\n  const 
cleanMessage = normalizeMessage(message)\n  if (!cleanMessage) return\n  toast.warning(cleanMessage, resolveToastOptions('warning', optionsOrDuration))\n}\n\nexport function info(message: string, optionsOrDuration?: number | ExternalToast): void {\n  const cleanMessage = normalizeMessage(message)\n  if (!cleanMessage) return\n  toast.info(cleanMessage, resolveToastOptions('info', optionsOrDuration))\n}\n"
  },
  {
    "path": "ui/src/lib/types/alerts.ts",
    "content": "export type AlertChannelType = 'smtp' | 'resend' | 'brevo'\nexport type AlertSeverity = 'info' | 'warn' | 'error' | 'critical'\nexport type AlertEventType = 'policy.violation' | 'schedule.failed' | 'schedule.slow' | '*'\n\nexport interface AlertChannel {\n  id: string\n  name: string\n  channel_type: AlertChannelType\n  is_active: boolean\n  created_by?: string | null\n  created_at: string\n  updated_at: string\n  config: Record<string, unknown>\n  has_secret: boolean\n}\n\nexport interface AlertRuleRoute {\n  id: string\n  rule_id: string\n  channel_id: string\n  channel_name: string\n  channel_type: AlertChannelType\n  recipients: string[]\n  is_active: boolean\n  delivery_mode: 'immediate' | 'digest' | string\n  digest_window_minutes: number\n  escalation_channel_id?: string | null\n  escalation_channel_name?: string | null\n  escalation_channel_type?: AlertChannelType | string | null\n  escalation_recipients: string[]\n  escalation_after_failures: number\n  created_at: string\n  updated_at: string\n}\n\nexport interface AlertRule {\n  id: string\n  name: string\n  event_type: AlertEventType | string\n  severity_min: AlertSeverity\n  enabled: boolean\n  cooldown_seconds: number\n  max_attempts: number\n  subject_template?: string | null\n  body_template?: string | null\n  created_by?: string | null\n  created_at: string\n  updated_at: string\n  routes: AlertRuleRoute[]\n}\n\nexport interface AlertEvent {\n  id: string\n  connection_id?: string | null\n  event_type: string\n  severity: AlertSeverity | string\n  title: string\n  message: string\n  payload_json?: string | null\n  fingerprint?: string | null\n  source_ref?: string | null\n  status: string\n  created_at: string\n  processed_at?: string | null\n}\n"
  },
  {
    "path": "ui/src/lib/types/api.ts",
    "content": "/** Standard API response envelope */\nexport interface ApiResponse<T = unknown> {\n  success: boolean\n  error?: string\n  data?: T\n}\n\n/** Session info returned by /api/auth/session */\nexport interface Session {\n  user: string\n  role: string\n  connectionId: string\n  connectionName: string\n  connectionOnline: boolean\n  expiresAt: string\n  version?: string\n  appVersion?: string\n}\n\n/** Connection info */\nexport interface Connection {\n  id: string\n  name: string\n  status: string\n  online: boolean\n  created_at: string\n  host_info?: HostInfo\n}\n\n/** Host machine metrics from agent */\nexport interface HostInfo {\n  hostname: string\n  os: string\n  arch: string\n  cpu_cores: number\n  memory_total: number\n  memory_free: number\n  disk_total: number\n  disk_free: number\n  go_version: string\n  agent_uptime: number\n  collected_at: string\n}\n\n/** License info returned by the server */\nexport interface LicenseInfo {\n  edition: string\n  valid: boolean\n  customer?: string\n  expires_at?: string\n  license_id?: string\n}\n\n/** Saved query */\nexport interface SavedQuery {\n  id: string\n  name: string\n  query: string\n  description?: string\n  created_at: string\n  updated_at: string\n}\n\n/** Dashboard */\nexport interface Dashboard {\n  id: string\n  name: string\n  description: string | null\n  created_by: string\n  created_at: string\n  updated_at: string\n}\n\n/** Dashboard panel */\nexport interface Panel {\n  id: string\n  dashboard_id: string\n  name: string\n  panel_type: string\n  query: string\n  connection_id: string | null\n  config: string\n  layout_x: number\n  layout_y: number\n  layout_w: number\n  layout_h: number\n  created_at: string\n  updated_at: string\n}\n\n/** Scheduled query */\nexport interface Schedule {\n  id: string\n  name: string\n  saved_query_id: string\n  connection_id: string | null\n  cron: string\n  timezone: string\n  enabled: boolean\n  timeout_ms: number\n  last_run_at: string | null\n  
next_run_at: string | null\n  last_status: string | null\n  last_error: string | null\n  created_by: string\n  created_at: string\n  updated_at: string\n}\n\n/** Schedule execution run */\nexport interface ScheduleRun {\n  id: string\n  schedule_id: string\n  started_at: string\n  finished_at: string | null\n  status: string\n  rows_affected: number\n  elapsed_ms: number\n  error: string | null\n}\n\n/** Panel visualization config (stored as JSON in panel.config) */\nexport interface PanelConfig {\n  chartType: 'table' | 'stat' | 'timeseries' | 'bar'\n  xColumn?: string\n  yColumns?: string[]\n  colors?: string[]\n  legendPosition?: 'bottom' | 'right' | 'none'\n}\n\n/** Audit log entry */\nexport interface AuditLog {\n  id: string\n  action: string\n  username: string | null\n  details: string | null\n  ip_address: string | null\n  created_at: string\n  parsed_details?: Record<string, unknown>\n}\n\n/** Admin stats overview */\nexport interface AdminStats {\n  users_count: number\n  connections: number\n  online: number\n  login_count: number\n  query_count: number\n}\n"
  },
  {
    "path": "ui/src/lib/types/brain.ts",
    "content": "export interface SchemaContextEntry {\n  database: string\n  table: string\n  columns: { name: string; type: string }[]\n}\n\nexport interface BrainChat {\n  id: string\n  connection_id: string\n  username: string\n  title: string\n  provider_id?: string | null\n  model_id?: string | null\n  archived: boolean\n  last_message_at?: string | null\n  context_database?: string | null\n  context_table?: string | null\n  context_tables?: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface BrainMessage {\n  id: string\n  chat_id: string\n  role: 'user' | 'assistant' | string\n  content: string\n  status: string\n  error?: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface BrainArtifact {\n  id: string\n  chat_id: string\n  message_id?: string | null\n  type: string\n  title: string\n  content: string\n  created_by?: string | null\n  created_at: string\n}\n\nexport interface BrainModelOption {\n  id: string\n  name: string\n  display_name?: string\n  provider_id: string\n  provider_name: string\n  provider_kind: string\n  is_active: boolean\n  is_default: boolean\n  provider_active: boolean\n  provider_default: boolean\n}\n\nexport interface BrainProviderAdmin {\n  id: string\n  name: string\n  kind: string\n  base_url?: string | null\n  has_api_key: boolean\n  is_active: boolean\n  is_default: boolean\n  created_by?: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface BrainSkill {\n  id: string\n  name: string\n  content: string\n  is_active: boolean\n  is_default: boolean\n  created_by?: string | null\n  created_at: string\n  updated_at: string\n}\n"
  },
  {
    "path": "ui/src/lib/types/governance.ts",
    "content": "// ── Sync ────────────────────────────────────────────────────────\n\nexport interface GovernanceSettings {\n  sync_enabled: boolean\n  updated_at: string\n  updated_by: string\n  banner_dismissed: boolean\n  syncer_running: boolean\n}\n\nexport interface SyncState {\n  id: string\n  connection_id: string\n  sync_type: 'metadata' | 'query_log' | 'access'\n  last_synced_at: string | null\n  watermark: string | null\n  status: 'idle' | 'running' | 'error'\n  last_error: string | null\n  row_count: number\n  created_at: string\n  updated_at: string\n}\n\nexport interface SyncResult {\n  metadata?: { databases_synced: number; tables_synced: number; columns_synced: number; schema_changes: number }\n  metadata_error?: string\n  query_log?: { queries_ingested: number; lineage_edges_found: number; violations_found: number; new_watermark: string }\n  query_log_error?: string\n  access?: { users_synced: number; roles_synced: number; grants_synced: number; matrix_entries: number; over_permissions: number }\n  access_error?: string\n}\n\n// ── Overview ────────────────────────────────────────────────────\n\nexport interface GovernanceOverview {\n  database_count: number\n  table_count: number\n  column_count: number\n  tagged_table_count: number\n  user_count: number\n  role_count: number\n  query_count_24h: number\n  lineage_edge_count: number\n  policy_count: number\n  violation_count: number\n  incident_count: number\n  schema_change_count: number\n  sync_states: SyncState[]\n  recent_changes: SchemaChange[]\n  recent_violations: PolicyViolation[]\n}\n\n// ── Metadata ────────────────────────────────────────────────────\n\nexport interface GovDatabase {\n  id: string\n  connection_id: string\n  name: string\n  engine: string\n  first_seen: string\n  last_updated: string\n  is_deleted: boolean\n}\n\nexport interface GovTable {\n  id: string\n  connection_id: string\n  database_name: string\n  table_name: string\n  engine: string\n  table_uuid: string\n  
total_rows: number\n  total_bytes: number\n  partition_count: number\n  first_seen: string\n  last_updated: string\n  is_deleted: boolean\n  tags?: string[]\n}\n\nexport interface GovColumn {\n  id: string\n  connection_id: string\n  database_name: string\n  table_name: string\n  column_name: string\n  column_type: string\n  column_position: number\n  default_kind: string | null\n  default_expression: string | null\n  comment: string | null\n  first_seen: string\n  last_updated: string\n  is_deleted: boolean\n  tags?: string[]\n}\n\nexport interface SchemaChange {\n  id: string\n  connection_id: string\n  change_type: string\n  database_name: string\n  table_name: string\n  column_name: string\n  old_value: string\n  new_value: string\n  detected_at: string\n  created_at: string\n}\n\n// ── Query Log ───────────────────────────────────────────────────\n\nexport interface QueryLogEntry {\n  id: string\n  connection_id: string\n  query_id: string\n  ch_user: string\n  query_text: string\n  normalized_hash: string\n  query_kind: string\n  event_time: string\n  duration_ms: number\n  read_rows: number\n  read_bytes: number\n  result_rows: number\n  written_rows: number\n  written_bytes: number\n  memory_usage: number\n  tables_used: string\n  is_error: boolean\n  error_message: string | null\n}\n\nexport interface TopQuery {\n  normalized_hash: string\n  count: number\n  avg_duration_ms: number\n  total_read_rows: number\n  sample_query: string\n  last_seen: string\n}\n\n// ── Lineage ─────────────────────────────────────────────────────\n\nexport interface ColumnLineageEdge {\n  source_column: string\n  target_column: string\n}\n\nexport interface LineageEdge {\n  id: string\n  source_database: string\n  source_table: string\n  target_database: string\n  target_table: string\n  query_id: string\n  edge_type: string\n  ch_user: string\n  detected_at: string\n  column_edges?: ColumnLineageEdge[]\n}\n\nexport interface LineageNode {\n  id: string\n  database: string\n  
table: string\n  type: 'source' | 'target' | 'current' | 'materialized_view' | 'view' | string\n  columns?: GovColumn[]\n}\n\nexport interface LineageGraph {\n  nodes: LineageNode[]\n  edges: LineageEdge[]\n}\n\n// ── Tags ────────────────────────────────────────────────────────\n\nexport interface TagEntry {\n  id: string\n  connection_id: string\n  object_type: 'table' | 'column'\n  database_name: string\n  table_name: string\n  column_name: string\n  tag: string\n  tagged_by: string\n  created_at: string\n}\n\n// ── Access ──────────────────────────────────────────────────────\n\nexport interface ChUser {\n  id: string\n  name: string\n  auth_type: string | null\n  host_ip: string | null\n  default_roles: string | null\n  first_seen: string\n  last_updated: string\n}\n\nexport interface ChRole {\n  id: string\n  name: string\n  first_seen: string\n  last_updated: string\n}\n\nexport interface AccessMatrixEntry {\n  id: string\n  user_name: string\n  role_name: string | null\n  database_name: string | null\n  table_name: string | null\n  privilege: string\n  is_direct_grant: boolean\n  last_query_time: string | null\n}\n\nexport interface OverPermission {\n  user_name: string\n  role_name: string | null\n  database_name: string | null\n  table_name: string | null\n  privilege: string\n  last_query_time: string | null\n  days_since_query: number | null\n  reason: string\n}\n\n// ── Policies ────────────────────────────────────────────────────\n\nexport interface Policy {\n  id: string\n  connection_id: string\n  name: string\n  description: string | null\n  object_type: 'database' | 'table' | 'column'\n  object_database: string | null\n  object_table: string | null\n  object_column: string | null\n  required_role: string\n  severity: string\n  enforcement_mode: 'warn' | 'block'\n  enabled: boolean\n  created_by: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface PolicyViolation {\n  id: string\n  connection_id: string\n  policy_id: 
string\n  query_log_id: string\n  ch_user: string\n  violation_detail: string\n  severity: string\n  detection_phase?: 'post_exec' | 'pre_exec_block' | string\n  request_endpoint?: string | null\n  detected_at: string\n  created_at: string\n  policy_name?: string\n}\n\nexport interface GovernanceObjectComment {\n  id: string\n  connection_id: string\n  object_type: 'table' | 'column' | string\n  database_name: string\n  table_name: string\n  column_name: string\n  comment_text: string\n  created_by?: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface GovernanceIncident {\n  id: string\n  connection_id: string\n  source_type: 'manual' | 'violation' | 'over_permission' | string\n  source_ref?: string | null\n  dedupe_key?: string | null\n  title: string\n  severity: 'info' | 'warn' | 'error' | 'critical' | string\n  status: 'open' | 'triaged' | 'in_progress' | 'resolved' | 'dismissed' | string\n  assignee?: string | null\n  details?: string | null\n  resolution_note?: string | null\n  occurrence_count: number\n  first_seen_at: string\n  last_seen_at: string\n  resolved_at?: string | null\n  created_by?: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface GovernanceIncidentComment {\n  id: string\n  incident_id: string\n  comment_text: string\n  created_by?: string | null\n  created_at: string\n}\n"
  },
  {
    "path": "ui/src/lib/types/models.ts",
    "content": "export type Materialization = 'view' | 'table'\nexport type ModelStatus = 'draft' | 'success' | 'error'\nexport type RunStatus = 'running' | 'success' | 'partial' | 'error'\nexport type ResultStatus = 'pending' | 'running' | 'success' | 'error' | 'skipped'\n\nexport interface Model {\n  id: string\n  name: string\n  description: string\n  connection_id: string\n  target_database: string\n  materialization: Materialization\n  sql_body: string\n  table_engine: string\n  order_by: string\n  status: ModelStatus\n  last_error: string | null\n  last_run_at: string | null\n  created_by: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface ModelRun {\n  id: string\n  connection_id: string\n  status: RunStatus\n  total_models: number\n  succeeded: number\n  failed: number\n  skipped: number\n  started_at: string\n  finished_at: string | null\n  triggered_by: string | null\n  created_at: string\n}\n\nexport interface ModelRunResult {\n  id: string\n  run_id: string\n  model_id: string\n  model_name: string\n  status: ResultStatus\n  resolved_sql: string | null\n  elapsed_ms: number\n  error: string | null\n  started_at: string | null\n  finished_at: string | null\n  created_at: string\n}\n\nexport interface DAGNode {\n  id: string\n  data: {\n    name: string\n    materialization: Materialization\n    status: ModelStatus\n    target_database: string\n  }\n  position: { x: number; y: number }\n}\n\nexport interface DAGEdge {\n  id: string\n  source: string\n  target: string\n}\n\nexport interface ModelDAG {\n  nodes: DAGNode[]\n  edges: DAGEdge[]\n}\n\nexport interface ValidationError {\n  model_id?: string\n  model_name?: string\n  error: string\n}\n\nexport interface ValidationResult {\n  valid: boolean\n  errors: ValidationError[]\n}\n\nexport interface ModelSchedule {\n  id: string\n  connection_id: string\n  anchor_model_id: string | null\n  cron: string\n  enabled: boolean\n  last_run_at: string | null\n  next_run_at: string | 
null\n  last_status: string | null\n  last_error: string | null\n  created_by: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface Pipeline {\n  anchor_model_id: string\n  model_ids: string[]\n  schedule: ModelSchedule | null\n}\n\nexport interface ModelEditState {\n  modelName: string\n  description: string\n  targetDatabase: string\n  materialization: string\n  sqlBody: string\n  tableEngine: string\n  orderBy: string\n}\n"
  },
  {
    "path": "ui/src/lib/types/pipelines.ts",
    "content": "export type PipelineStatus = 'draft' | 'stopped' | 'starting' | 'running' | 'error' | 'stopping'\n\nexport type NodeType =\n  | 'source_kafka'\n  | 'source_webhook'\n  | 'source_database'\n  | 'source_s3'\n  | 'sink_clickhouse'\n\nexport interface Pipeline {\n  id: string\n  name: string\n  description: string | null\n  connection_id: string\n  status: PipelineStatus\n  config: string\n  created_by: string | null\n  last_started_at: string | null\n  last_stopped_at: string | null\n  last_error: string | null\n  created_at: string\n  updated_at: string\n}\n\nexport interface PipelineNode {\n  id: string\n  pipeline_id: string\n  node_type: NodeType\n  label: string\n  position_x: number\n  position_y: number\n  config_encrypted: string\n  created_at: string\n  updated_at: string\n}\n\nexport interface PipelineEdge {\n  id: string\n  pipeline_id: string\n  source_node_id: string\n  target_node_id: string\n  source_handle: string | null\n  target_handle: string | null\n  created_at: string\n}\n\nexport interface PipelineGraph {\n  nodes: PipelineNode[]\n  edges: PipelineEdge[]\n}\n\nexport interface PipelineRun {\n  id: string\n  pipeline_id: string\n  status: 'running' | 'success' | 'error' | 'stopped'\n  started_at: string\n  finished_at: string | null\n  rows_ingested: number\n  bytes_ingested: number\n  errors_count: number\n  last_error: string | null\n  metrics_json: string\n  created_at: string\n}\n\nexport interface PipelineRunLog {\n  id: string\n  run_id: string\n  level: 'debug' | 'info' | 'warn' | 'error'\n  message: string\n  created_at: string\n}\n\nexport interface ConnectorFieldDef {\n  key: string\n  label: string\n  type: 'text' | 'password' | 'number' | 'select' | 'textarea' | 'toggle' | 'info'\n  placeholder?: string\n  required?: boolean\n  default?: unknown\n  options?: { value: string; label: string }[]\n  help?: string\n}\n\nexport const SOURCE_NODE_TYPES: { type: NodeType; label: string; description: string }[] = [\n  { type: 
'source_kafka', label: 'Kafka', description: 'Stream from Kafka topic' },\n  { type: 'source_webhook', label: 'Webhook', description: 'Receive HTTP POST events' },\n  { type: 'source_database', label: 'Database', description: 'Poll from PostgreSQL, MySQL, or SQLite' },\n  { type: 'source_s3', label: 'S3', description: 'Read files from S3-compatible storage' },\n]\n\nexport const SINK_NODE_TYPES: { type: NodeType; label: string; description: string }[] = [\n  { type: 'sink_clickhouse', label: 'ClickHouse', description: 'Insert into ClickHouse table' },\n]\n\nexport const CONNECTOR_FIELDS: Record<NodeType, ConnectorFieldDef[]> = {\n  source_kafka: [\n    { key: 'brokers', label: 'Brokers', type: 'text', placeholder: 'broker1:9092,broker2:9092', required: true, help: 'Comma-separated list of Kafka broker addresses' },\n    { key: 'topic', label: 'Topic', type: 'text', required: true },\n    { key: 'consumer_group', label: 'Consumer Group', type: 'text', required: true, default: 'ch-ui-pipeline' },\n    { key: 'sasl_mechanism', label: 'SASL Mechanism', type: 'select', options: [\n      { value: '', label: 'None' },\n      { value: 'PLAIN', label: 'PLAIN' },\n      { value: 'SCRAM-SHA-256', label: 'SCRAM-SHA-256' },\n      { value: 'SCRAM-SHA-512', label: 'SCRAM-SHA-512' },\n    ], default: '' },\n    { key: 'sasl_username', label: 'SASL Username', type: 'text' },\n    { key: 'sasl_password', label: 'SASL Password', type: 'password' },\n    { key: 'use_tls', label: 'Enable TLS', type: 'toggle', default: false },\n    { key: 'batch_size', label: 'Batch Size', type: 'number', default: 1000, help: 'Records per batch before flushing to ClickHouse' },\n    { key: 'batch_timeout_ms', label: 'Batch Timeout (ms)', type: 'number', default: 5000 },\n  ],\n  source_webhook: [\n    { key: 'webhook_url', label: 'Webhook URL', type: 'info', help: 'POST JSON data to this URL. Include Authorization: Bearer <token> header if auth token is set.' 
},\n    { key: 'auth_enabled', label: 'Require Authentication', type: 'toggle', default: false, help: 'When enabled, a Bearer token is generated. Include it in the Authorization header of requests.' },\n    { key: 'batch_size', label: 'Batch Size', type: 'number', default: 100 },\n    { key: 'batch_timeout_ms', label: 'Batch Timeout (ms)', type: 'number', default: 2000 },\n  ],\n  source_database: [\n    { key: 'db_type', label: 'Database Type', type: 'select', required: true, options: [\n      { value: 'postgres', label: 'PostgreSQL' },\n      { value: 'mysql', label: 'MySQL' },\n      { value: 'sqlite', label: 'SQLite' },\n    ] },\n    { key: 'connection_string', label: 'Connection String', type: 'password', required: true, placeholder: 'postgres://user:pass@host/db or /path/to/file.db', help: 'For SQLite, use a file path like /data/my.db' },\n    { key: 'query', label: 'SQL Query', type: 'textarea', required: true, placeholder: 'SELECT * FROM events WHERE id > $1', help: 'Use $1 placeholder with watermark column for incremental polling' },\n    { key: 'poll_interval', label: 'Poll Interval (seconds)', type: 'number', default: 60, help: 'Seconds between each poll' },\n    { key: 'watermark_column', label: 'Watermark Column', type: 'text', help: 'Column for incremental polling (e.g. id or created_at)' },\n    { key: 'batch_size', label: 'Batch Size', type: 'number', default: 1000 },\n  ],\n  source_s3: [\n    { key: 'endpoint', label: 'S3 Endpoint', type: 'text', placeholder: 'https://s3.amazonaws.com', help: 'S3-compatible endpoint URL. Leave empty for AWS S3.' 
},\n    { key: 'region', label: 'Region', type: 'text', default: 'us-east-1' },\n    { key: 'bucket', label: 'Bucket', type: 'text', required: true },\n    { key: 'prefix', label: 'Key Prefix', type: 'text', placeholder: 'data/events/' },\n    { key: 'access_key', label: 'Access Key ID', type: 'password', required: true },\n    { key: 'secret_key', label: 'Secret Access Key', type: 'password', required: true },\n    { key: 'format', label: 'File Format', type: 'select', required: true, options: [\n      { value: 'json', label: 'JSON' },\n      { value: 'ndjson', label: 'JSON Lines (NDJSON)' },\n      { value: 'csv', label: 'CSV' },\n    ], default: 'json' },\n    { key: 'poll_interval', label: 'Poll Interval (seconds)', type: 'number', default: 300, help: 'Seconds between each poll' },\n    { key: 'batch_size', label: 'Batch Size', type: 'number', default: 1000 },\n  ],\n  sink_clickhouse: [\n    { key: 'database', label: 'Target Database', type: 'text', required: true, default: 'default' },\n    { key: 'table', label: 'Target Table', type: 'text', required: true },\n    { key: 'create_table', label: 'Create Table If Not Exists', type: 'toggle', default: false },\n    { key: 'create_table_engine', label: 'Table Engine', type: 'select', options: [\n      { value: 'MergeTree', label: 'MergeTree' },\n      { value: 'ReplacingMergeTree', label: 'ReplacingMergeTree' },\n      { value: 'SummingMergeTree', label: 'SummingMergeTree' },\n    ], default: 'MergeTree', help: 'Only used when \"Create Table\" is enabled' },\n    { key: 'create_table_order_by', label: 'ORDER BY', type: 'text', placeholder: 'tuple()', help: 'ClickHouse ORDER BY clause' },\n  ],\n}\n"
  },
  {
    "path": "ui/src/lib/types/query.ts",
    "content": "/** Column metadata from ClickHouse */\nexport interface ColumnMeta {\n  name: string\n  type: string\n}\n\n/** Query result in JSONCompact format (positional arrays) */\nexport interface CompactResult {\n  meta: ColumnMeta[]\n  data: unknown[][]\n  rows: number\n  statistics?: QueryStats\n}\n\n/** Query execution statistics */\nexport interface QueryStats {\n  elapsed: number\n  rows_read: number\n  bytes_read: number\n}\n\n/** Explorer data response (server-side paginated) */\nexport interface ExplorerDataResponse {\n  success: boolean\n  meta: ColumnMeta[]\n  data: unknown[][]\n  rows: number\n  total_rows: number\n  page: number\n  page_size: number\n}\n\n/** Legacy query result (JSON format, row objects) */\nexport interface LegacyQueryResult {\n  success: boolean\n  data: Record<string, unknown>[]\n  meta: ColumnMeta[]\n  statistics?: QueryStats\n  rows: number\n  elapsed_ms: number\n}\n\nexport interface SampleQueryResult extends LegacyQueryResult {\n  sampling_mode?: 'per_shard' | 'global'\n  warning?: string\n}\n\nexport interface QueryPlanNode {\n  id: string\n  parent_id?: string\n  level: number\n  label: string\n}\n\nexport interface QueryPlanResult {\n  success: boolean\n  source: string\n  lines: string[]\n  nodes: QueryPlanNode[]\n}\n\nexport interface QueryProfileResult {\n  success: boolean\n  available: boolean\n  reason?: string\n  profile?: Record<string, unknown>\n}\n\n/** Per-table estimate from EXPLAIN ESTIMATE */\nexport interface TableEstimate {\n  database: string\n  table: string\n  parts: number\n  rows: number\n  marks: number\n}\n\n/** Query cost estimate result */\nexport interface QueryEstimateResult {\n  success: boolean\n  tables: TableEstimate[]\n  total_rows: number\n  total_parts: number\n  total_marks: number\n  error?: string\n}\n\n/** NDJSON stream message types */\nexport type StreamMessage =\n  | { type: 'meta'; meta: ColumnMeta[] }\n  | { type: 'chunk'; data: unknown[][]; seq: number }\n  | { type: 'done'; 
statistics?: QueryStats; total_rows: number }\n  | { type: 'error'; error: string }\n"
  },
  {
    "path": "ui/src/lib/types/schema.ts",
    "content": "/** Database in the schema tree */\nexport interface Database {\n  name: string\n  tables?: Table[]\n  expanded?: boolean\n  loading?: boolean\n}\n\n/** Table in the schema tree */\nexport interface Table {\n  name: string\n  engine?: string\n  columns?: Column[]\n  expanded?: boolean\n  loading?: boolean\n}\n\n/** Column in the schema tree */\nexport interface Column {\n  name: string\n  type: string\n  default_type?: string\n  default_expression?: string\n  comment?: string\n}\n"
  },
  {
    "path": "ui/src/lib/utils/calendar.ts",
    "content": "/** Number of days in a given month (1-12). */\nexport function daysInMonth(year: number, month: number): number {\n  return new Date(year, month, 0).getDate()\n}\n\n/** Day-of-week (0=Sun..6=Sat) for the 1st of a month (1-12). */\nexport function firstDayOfWeek(year: number, month: number): number {\n  return new Date(year, month - 1, 1).getDay()\n}\n\n/**\n * Build a calendar grid for a given month.\n * Returns rows of 7 cells. Cells outside the month are null.\n */\nexport function buildMonthGrid(year: number, month: number): (Date | null)[][] {\n  const total = daysInMonth(year, month)\n  const startDay = firstDayOfWeek(year, month)\n  const grid: (Date | null)[][] = []\n  let day = 1\n\n  for (let row = 0; row < 6; row++) {\n    const week: (Date | null)[] = []\n    for (let col = 0; col < 7; col++) {\n      if (row === 0 && col < startDay) {\n        week.push(null)\n      } else if (day > total) {\n        week.push(null)\n      } else {\n        week.push(new Date(year, month - 1, day))\n        day++\n      }\n    }\n    grid.push(week)\n    if (day > total) break\n  }\n  return grid\n}\n\n/** Navigate months by delta, returns new { year, month }. */\nexport function shiftMonth(year: number, month: number, delta: number): { year: number; month: number } {\n  const d = new Date(year, month - 1 + delta, 1)\n  return { year: d.getFullYear(), month: d.getMonth() + 1 }\n}\n\n/** Check if two dates are the same calendar day. */\nexport function isSameDay(a: Date, b: Date): boolean {\n  return (\n    a.getFullYear() === b.getFullYear() &&\n    a.getMonth() === b.getMonth() &&\n    a.getDate() === b.getDate()\n  )\n}\n\n/** Check if date falls within [from, to] inclusive (day-level). 
*/\nexport function isInRange(date: Date, from: Date, to: Date): boolean {\n  const t = date.getTime()\n  const lo = new Date(from.getFullYear(), from.getMonth(), from.getDate()).getTime()\n  const hi = new Date(to.getFullYear(), to.getMonth(), to.getDate(), 23, 59, 59, 999).getTime()\n  return t >= lo && t <= hi\n}\n\n/** Check if a date is today. */\nexport function isToday(date: Date): boolean {\n  return isSameDay(date, new Date())\n}\n\n/** Month name from month number (1-12). */\nexport function monthName(month: number): string {\n  return new Date(2000, month - 1, 1).toLocaleString('en', { month: 'long' })\n}\n"
  },
  {
    "path": "ui/src/lib/utils/ch-types.ts",
    "content": "/** Map ClickHouse types to display categories for cell rendering */\nexport type DisplayType = 'number' | 'string' | 'date' | 'bool' | 'json' | 'null' | 'unknown'\n\nexport function getDisplayType(chType: string): DisplayType {\n  const t = chType.replace(/Nullable\\((.+)\\)/, '$1').replace(/LowCardinality\\((.+)\\)/, '$1')\n\n  if (/^(U?Int|Float|Decimal)/.test(t)) return 'number'\n  if (/^(Date|DateTime)/.test(t)) return 'date'\n  if (/^(Bool)/.test(t)) return 'bool'\n  if (/^(String|FixedString|Enum|UUID|IPv[46])/.test(t)) return 'string'\n  if (/^(Array|Map|Tuple|Nested|JSON)/.test(t)) return 'json'\n\n  return 'unknown'\n}\n\n/** Check if a value should be right-aligned (numbers) */\nexport function isRightAligned(chType: string): boolean {\n  return getDisplayType(chType) === 'number'\n}\n"
  },
  {
    "path": "ui/src/lib/utils/chart-transform.ts",
    "content": "import type { PanelConfig } from '../types/api'\nimport type uPlot from 'uplot'\n\nexport interface ColumnMeta {\n  name: string\n  type: string\n}\n\nexport const DEFAULT_COLORS = [\n  '#F97316', // orange\n  '#FB923C', // orange light\n  '#F59E0B', // amber\n  '#D97706', // amber deep\n  '#10B981', // emerald\n  '#84CC16', // lime\n  '#EF4444', // red\n  '#EC4899', // pink\n]\n\nexport const TIME_RANGES = [\n  { label: 'Last 5m', value: '5m', seconds: 300 },\n  { label: 'Last 15m', value: '15m', seconds: 900 },\n  { label: 'Last 1h', value: '1h', seconds: 3600 },\n  { label: 'Last 6h', value: '6h', seconds: 21600 },\n  { label: 'Last 24h', value: '24h', seconds: 86400 },\n  { label: 'Last 7d', value: '7d', seconds: 604800 },\n  { label: 'Last 30d', value: '30d', seconds: 2592000 },\n]\n\nexport interface ExtendedPreset {\n  label: string\n  value: string\n  group: 'recent' | 'named' | 'duration'\n}\n\nexport const EXTENDED_PRESETS: ExtendedPreset[] = [\n  { label: 'Last 5 minutes',   value: '5m',   group: 'recent' },\n  { label: 'Last 15 minutes',  value: '15m',  group: 'recent' },\n  { label: 'Last 30 minutes',  value: '30m',  group: 'recent' },\n  { label: 'Last 1 hour',      value: '1h',   group: 'recent' },\n  { label: 'Last 3 hours',     value: '3h',   group: 'recent' },\n  { label: 'Last 6 hours',     value: '6h',   group: 'recent' },\n  { label: 'Last 12 hours',    value: '12h',  group: 'recent' },\n  { label: 'Last 24 hours',    value: '24h',  group: 'recent' },\n  { label: 'Today',            value: 'preset:today',        group: 'named' },\n  { label: 'Yesterday',        value: 'preset:yesterday',    group: 'named' },\n  { label: 'This Week',        value: 'preset:this-week',    group: 'named' },\n  { label: 'Last Week',        value: 'preset:last-week',    group: 'named' },\n  { label: 'This Month',       value: 'preset:this-month',   group: 'named' },\n  { label: 'Last Month',       value: 'preset:last-month',   group: 'named' },\n  { 
label: 'Last 7 days',      value: '7d',   group: 'duration' },\n  { label: 'Last 30 days',     value: '30d',  group: 'duration' },\n  { label: 'Last 3 Months',    value: 'preset:last-3-months', group: 'named' },\n  { label: 'Last 6 Months',    value: 'preset:last-6-months', group: 'named' },\n]\n\nconst DATE_TYPES = new Set([\n  'Date', 'Date32', 'DateTime', 'DateTime64',\n  'Nullable(Date)', 'Nullable(Date32)', 'Nullable(DateTime)', 'Nullable(DateTime64)',\n])\n\nconst NUMERIC_TYPES = new Set([\n  'UInt8', 'UInt16', 'UInt32', 'UInt64', 'UInt128', 'UInt256',\n  'Int8', 'Int16', 'Int32', 'Int64', 'Int128', 'Int256',\n  'Float32', 'Float64', 'Decimal',\n])\n\nexport function isDateType(chType: string): boolean {\n  if (DATE_TYPES.has(chType)) return true\n  return /^(Nullable\\()?(Date|DateTime)/.test(chType)\n}\n\nexport function isNumericType(chType: string): boolean {\n  const base = chType.replace(/^Nullable\\(/, '').replace(/\\)$/, '')\n  if (NUMERIC_TYPES.has(base)) return true\n  return /^(U?Int|Float|Decimal)/.test(base)\n}\n\n/** True when x column is categorical (not date, not numeric — i.e. String). */\nexport function isCategoricalX(meta: ColumnMeta[], xColumn: string): boolean {\n  const xMeta = meta.find(m => m.name === xColumn)\n  if (!xMeta) return false\n  return !isDateType(xMeta.type) && !isNumericType(xMeta.type)\n}\n\n/**\n * Transform dashboard API row-objects into uPlot's AlignedData format.\n * Returns [xValues[], y1Values[], y2Values[], ...]\n */\nexport function toUPlotData(\n  data: Record<string, unknown>[],\n  meta: ColumnMeta[],\n  config: PanelConfig,\n): uPlot.AlignedData {\n  if (!data.length || !config.xColumn || !config.yColumns?.length) {\n    return [new Float64Array(0)]\n  }\n\n  const xCol = config.xColumn\n  const xMeta = meta.find(m => m.name === xCol)\n  const isTime = xMeta ? 
isDateType(xMeta.type) : false\n\n  const xArr = new Float64Array(data.length)\n  for (let i = 0; i < data.length; i++) {\n    const raw = data[i][xCol]\n    if (isTime) {\n      const ts = new Date(raw as string).getTime()\n      xArr[i] = ts / 1000 // uPlot uses unix seconds\n    } else {\n      // Fall back to the row index for non-numeric values while keeping a\n      // legitimate 0 (the previous Number(raw) || i remapped 0 to the index).\n      const n = raw == null ? NaN : Number(raw)\n      xArr[i] = Number.isFinite(n) ? n : i\n    }\n  }\n\n  const series: uPlot.AlignedData = [xArr]\n  for (const yCol of config.yColumns) {\n    const yArr = new Float64Array(data.length)\n    for (let i = 0; i < data.length; i++) {\n      yArr[i] = Number(data[i][yCol]) || 0\n    }\n    series.push(yArr)\n  }\n\n  return series\n}\n\n/** Extract single stat value from first row, first column */\nexport function getStatValue(data: Record<string, unknown>[], meta: ColumnMeta[]): string {\n  if (data.length > 0 && meta.length > 0) {\n    const key = meta[0].name\n    const val = data[0][key] ?? data[0][Object.keys(data[0])[0]]\n    if (val === null || val === undefined) return '--'\n    const num = Number(val)\n    if (!isNaN(num)) {\n      return num.toLocaleString()\n    }\n    return String(val)\n  }\n  return '--'\n}\n"
  },
  {
    "path": "ui/src/lib/utils/dashboard-time.test.ts",
    "content": "import { describe, expect, it } from 'vitest'\nimport {\n  decodeAbsoluteDashboardRange,\n  encodeAbsoluteDashboardRange,\n  formatDashboardTimeRangeLabel,\n  resolveNamedPreset,\n  toDashboardTimeRangePayload,\n} from './dashboard-time'\n\ndescribe('dashboard-time', () => {\n  it('encodes and decodes absolute ranges', () => {\n    const from = '2026-01-01T00:00:00.000Z'\n    const to = '2026-01-01T01:00:00.000Z'\n    const encoded = encodeAbsoluteDashboardRange(from, to)\n    expect(encoded).toBe(`abs:${from}|${to}`)\n    expect(decodeAbsoluteDashboardRange(encoded)).toEqual({ from, to })\n  })\n\n  it('parses shorthand relative tokens', () => {\n    expect(toDashboardTimeRangePayload('5min')).toEqual({\n      type: 'relative',\n      from: '5m',\n      to: 'now',\n    })\n  })\n\n  it('parses explicit relative ranges', () => {\n    expect(toDashboardTimeRangePayload('now-2h to now')).toEqual({\n      type: 'relative',\n      from: 'now-2h',\n      to: 'now',\n    })\n  })\n\n  it('parses absolute range strings', () => {\n    const res = toDashboardTimeRangePayload('2026-01-01T00:00:00Z to 2026-01-01T01:00:00Z')\n    expect(res.type).toBe('absolute')\n    expect(res.from).toBe('2026-01-01T00:00:00.000Z')\n    expect(res.to).toBe('2026-01-01T01:00:00.000Z')\n  })\n\n  it('formats common labels', () => {\n    expect(formatDashboardTimeRangeLabel('1h')).toBe('Last 1h')\n    expect(formatDashboardTimeRangeLabel('7d')).toBe('Last 7d')\n  })\n\n  it('formats new duration labels', () => {\n    expect(formatDashboardTimeRangeLabel('30m')).toBe('Last 30m')\n    expect(formatDashboardTimeRangeLabel('3h')).toBe('Last 3h')\n    expect(formatDashboardTimeRangeLabel('12h')).toBe('Last 12h')\n  })\n\n  it('formats named preset labels', () => {\n    expect(formatDashboardTimeRangeLabel('preset:today')).toBe('Today')\n    expect(formatDashboardTimeRangeLabel('preset:yesterday')).toBe('Yesterday')\n    
expect(formatDashboardTimeRangeLabel('preset:this-week')).toBe('This Week')\n    expect(formatDashboardTimeRangeLabel('preset:last-week')).toBe('Last Week')\n    expect(formatDashboardTimeRangeLabel('preset:this-month')).toBe('This Month')\n    expect(formatDashboardTimeRangeLabel('preset:last-month')).toBe('Last Month')\n    expect(formatDashboardTimeRangeLabel('preset:last-3-months')).toBe('Last 3 Months')\n    expect(formatDashboardTimeRangeLabel('preset:last-6-months')).toBe('Last 6 Months')\n  })\n\n  describe('resolveNamedPreset', () => {\n    it('resolves preset:today to absolute range', () => {\n      const result = resolveNamedPreset('preset:today')\n      expect(result).not.toBeNull()\n      const from = new Date(result!.from)\n      const to = new Date(result!.to)\n      expect(from.getHours() + from.getMinutes() + from.getSeconds()).toBe(0)\n      expect(to.getTime()).toBeLessThanOrEqual(Date.now())\n      expect(to.getTime()).toBeGreaterThan(from.getTime())\n    })\n\n    it('resolves preset:yesterday', () => {\n      const result = resolveNamedPreset('preset:yesterday')\n      expect(result).not.toBeNull()\n      const from = new Date(result!.from)\n      const to = new Date(result!.to)\n      expect(to.getTime() - from.getTime()).toBe(86400000) // exactly 1 day\n    })\n\n    it('resolves preset:this-week', () => {\n      const result = resolveNamedPreset('preset:this-week')\n      expect(result).not.toBeNull()\n      const from = new Date(result!.from)\n      expect(from.getDay()).toBe(0) // starts on Sunday\n    })\n\n    it('resolves preset:last-week', () => {\n      const result = resolveNamedPreset('preset:last-week')\n      expect(result).not.toBeNull()\n      const from = new Date(result!.from)\n      const to = new Date(result!.to)\n      expect(from.getDay()).toBe(0)\n      expect(to.getDay()).toBe(0)\n      expect(to.getTime() - from.getTime()).toBe(604800000) // exactly 7 days\n    })\n\n    it('resolves preset:this-month', () => {\n      
const result = resolveNamedPreset('preset:this-month')\n      expect(result).not.toBeNull()\n      const from = new Date(result!.from)\n      expect(from.getDate()).toBe(1)\n    })\n\n    it('resolves preset:last-month', () => {\n      const result = resolveNamedPreset('preset:last-month')\n      expect(result).not.toBeNull()\n      const from = new Date(result!.from)\n      const to = new Date(result!.to)\n      expect(from.getDate()).toBe(1)\n      expect(to.getDate()).toBe(1)\n    })\n\n    it('resolves preset:last-3-months and preset:last-6-months', () => {\n      const r3 = resolveNamedPreset('preset:last-3-months')\n      const r6 = resolveNamedPreset('preset:last-6-months')\n      expect(r3).not.toBeNull()\n      expect(r6).not.toBeNull()\n      expect(new Date(r6!.from).getTime()).toBeLessThan(new Date(r3!.from).getTime())\n    })\n\n    it('returns null for unknown presets', () => {\n      expect(resolveNamedPreset('preset:unknown')).toBeNull()\n      expect(resolveNamedPreset('not-a-preset')).toBeNull()\n    })\n  })\n\n  describe('toDashboardTimeRangePayload with named presets', () => {\n    it('resolves preset:today to absolute payload', () => {\n      const result = toDashboardTimeRangePayload('preset:today')\n      expect(result.type).toBe('absolute')\n      expect(result.from).toBeTruthy()\n      expect(result.to).toBeTruthy()\n    })\n\n    it('resolves preset:yesterday to absolute payload', () => {\n      const result = toDashboardTimeRangePayload('preset:yesterday')\n      expect(result.type).toBe('absolute')\n    })\n  })\n})\n"
  },
  {
    "path": "ui/src/lib/utils/dashboard-time.ts",
    "content": "export interface DashboardTimeRangePayload {\n  type: 'relative' | 'absolute'\n  from: string\n  to: string\n}\n\nconst relativeToken = /^(?:now-)?\\s*\\d+\\s*[a-zA-Z]+$/\n\nexport function encodeAbsoluteDashboardRange(fromISO: string, toISO: string): string {\n  return `abs:${fromISO}|${toISO}`\n}\n\nexport function decodeAbsoluteDashboardRange(value: string): { from: string; to: string } | null {\n  const trimmed = value.trim()\n  if (!trimmed.startsWith('abs:')) return null\n  const payload = trimmed.slice(4)\n  const sep = payload.indexOf('|')\n  if (sep <= 0) return null\n  const from = payload.slice(0, sep).trim()\n  const to = payload.slice(sep + 1).trim()\n  if (!from || !to) return null\n  return { from, to }\n}\n\nfunction normalizeRelative(value: string, fallback: string): string {\n  const trimmed = value.trim()\n  if (!trimmed) return fallback\n  const lower = trimmed.toLowerCase()\n\n  if (lower === 'now') return 'now'\n\n  // normalize forms like \"5min\", \"now-5 minutes\", \"2hrs\"\n  const match = lower.match(/^(now-)?\\s*(\\d+)\\s*([a-z]+)$/)\n  if (!match) {\n    return fallback\n  }\n\n  const prefix = match[1] ? 
'now-' : ''\n  const amount = match[2]\n  const rawUnit = match[3]\n  let unit = 'm'\n\n  if (rawUnit === 's' || rawUnit === 'sec' || rawUnit === 'secs' || rawUnit === 'second' || rawUnit === 'seconds') {\n    unit = 's'\n  } else if (rawUnit === 'm' || rawUnit === 'min' || rawUnit === 'mins' || rawUnit === 'minute' || rawUnit === 'minutes') {\n    unit = 'm'\n  } else if (rawUnit === 'h' || rawUnit === 'hr' || rawUnit === 'hrs' || rawUnit === 'hour' || rawUnit === 'hours') {\n    unit = 'h'\n  } else if (rawUnit === 'd' || rawUnit === 'day' || rawUnit === 'days') {\n    unit = 'd'\n  } else if (rawUnit === 'w' || rawUnit === 'week' || rawUnit === 'weeks') {\n    unit = 'w'\n  } else if (rawUnit === 'mo' || rawUnit === 'mon' || rawUnit === 'month' || rawUnit === 'months' || rawUnit === 'mth') {\n    unit = 'M'\n  } else if (rawUnit === 'y' || rawUnit === 'yr' || rawUnit === 'yrs' || rawUnit === 'year' || rawUnit === 'years') {\n    unit = 'y'\n  }\n\n  return `${prefix}${amount}${unit}`\n}\n\nfunction isAbsoluteToken(value: string): boolean {\n  if (!value) return false\n  if (value.toLowerCase().startsWith('now')) return false\n  return !Number.isNaN(Date.parse(value))\n}\n\n// ── Named preset resolution ────────────────────────────────\n\nconst PRESET_LABELS: Record<string, string> = {\n  'preset:today': 'Today',\n  'preset:yesterday': 'Yesterday',\n  'preset:this-week': 'This Week',\n  'preset:last-week': 'Last Week',\n  'preset:this-month': 'This Month',\n  'preset:last-month': 'Last Month',\n  'preset:last-3-months': 'Last 3 Months',\n  'preset:last-6-months': 'Last 6 Months',\n}\n\nconst DURATION_LABELS: Record<string, string> = {\n  '5m': 'Last 5m',\n  '15m': 'Last 15m',\n  '30m': 'Last 30m',\n  '1h': 'Last 1h',\n  '3h': 'Last 3h',\n  '6h': 'Last 6h',\n  '12h': 'Last 12h',\n  '24h': 'Last 24h',\n  '7d': 'Last 7d',\n  '30d': 'Last 30d',\n}\n\nexport function resolveNamedPreset(name: string): { from: string; to: string } | null {\n  const now = new Date()\n  
const startOfDay = new Date(now.getFullYear(), now.getMonth(), now.getDate())\n\n  switch (name) {\n    case 'preset:today':\n      return { from: startOfDay.toISOString(), to: now.toISOString() }\n    case 'preset:yesterday': {\n      const yd = new Date(startOfDay)\n      yd.setDate(yd.getDate() - 1)\n      return { from: yd.toISOString(), to: startOfDay.toISOString() }\n    }\n    case 'preset:this-week': {\n      const dow = now.getDay()\n      const startOfWeek = new Date(startOfDay)\n      startOfWeek.setDate(startOfWeek.getDate() - dow)\n      return { from: startOfWeek.toISOString(), to: now.toISOString() }\n    }\n    case 'preset:last-week': {\n      const dow = now.getDay()\n      const endOfLastWeek = new Date(startOfDay)\n      endOfLastWeek.setDate(endOfLastWeek.getDate() - dow)\n      const startOfLastWeek = new Date(endOfLastWeek)\n      startOfLastWeek.setDate(startOfLastWeek.getDate() - 7)\n      return { from: startOfLastWeek.toISOString(), to: endOfLastWeek.toISOString() }\n    }\n    case 'preset:this-month': {\n      const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1)\n      return { from: startOfMonth.toISOString(), to: now.toISOString() }\n    }\n    case 'preset:last-month': {\n      const startOfThisMonth = new Date(now.getFullYear(), now.getMonth(), 1)\n      const startOfLastMonth = new Date(now.getFullYear(), now.getMonth() - 1, 1)\n      return { from: startOfLastMonth.toISOString(), to: startOfThisMonth.toISOString() }\n    }\n    case 'preset:last-3-months': {\n      const d = new Date(now)\n      d.setMonth(d.getMonth() - 3)\n      return { from: d.toISOString(), to: now.toISOString() }\n    }\n    case 'preset:last-6-months': {\n      const d = new Date(now)\n      d.setMonth(d.getMonth() - 6)\n      return { from: d.toISOString(), to: now.toISOString() }\n    }\n    default:\n      return null\n  }\n}\n\n// ── Public API ──────────────────────────────────────────────\n\nexport function 
formatDashboardTimeRangeLabel(value: string): string {\n  const absolute = decodeAbsoluteDashboardRange(value)\n  if (absolute) {\n    const from = new Date(absolute.from).toLocaleString()\n    const to = new Date(absolute.to).toLocaleString()\n    return `${from} -> ${to}`\n  }\n\n  const trimmed = value.trim()\n  if (!trimmed) return 'Last 1h'\n  if (PRESET_LABELS[trimmed]) return PRESET_LABELS[trimmed]\n  if (DURATION_LABELS[trimmed]) return DURATION_LABELS[trimmed]\n  if (trimmed.includes(' to ')) return trimmed\n  return trimmed\n}\n\nexport function toDashboardTimeRangePayload(value: string): DashboardTimeRangePayload {\n  const trimmed = value.trim()\n\n  // Named presets — resolved at query time to absolute ranges\n  if (trimmed.startsWith('preset:')) {\n    const resolved = resolveNamedPreset(trimmed)\n    if (resolved) {\n      return { type: 'absolute', from: resolved.from, to: resolved.to }\n    }\n  }\n\n  const absolute = decodeAbsoluteDashboardRange(trimmed)\n  if (absolute) {\n    return {\n      type: 'absolute',\n      from: absolute.from,\n      to: absolute.to,\n    }\n  }\n\n  if (!trimmed) {\n    return {\n      type: 'relative',\n      from: '1h',\n      to: 'now',\n    }\n  }\n\n  if (trimmed.includes(' to ')) {\n    const [rawFrom, rawTo] = trimmed.split(/\\s+to\\s+/i).map((part) => part.trim())\n    if (isAbsoluteToken(rawFrom) && isAbsoluteToken(rawTo)) {\n      return {\n        type: 'absolute',\n        from: new Date(rawFrom).toISOString(),\n        to: new Date(rawTo).toISOString(),\n      }\n    }\n\n    return {\n      type: 'relative',\n      from: normalizeRelative(rawFrom, '1h'),\n      to: normalizeRelative(rawTo, 'now'),\n    }\n  }\n\n  // Accept Grafana-style shorthand like \"now-5m\", \"now-5min\", \"5m\"\n  if (trimmed.toLowerCase().startsWith('now-') || relativeToken.test(trimmed)) {\n    return {\n      type: 'relative',\n      from: normalizeRelative(trimmed, '1h'),\n      to: 'now',\n    }\n  }\n\n  return {\n    type: 
'relative',\n    from: normalizeRelative(trimmed, '1h'),\n    to: 'now',\n  }\n}\n"
  },
  {
    "path": "ui/src/lib/utils/export.ts",
    "content": "import type { ColumnMeta } from '../types/query'\n\nfunction normalizeScalar(val: unknown): string {\n  if (val === null || val === undefined) return ''\n  if (typeof val === 'object') return JSON.stringify(val)\n  return String(val)\n}\n\nfunction escapeDelimited(val: unknown, delimiter: ',' | '\\t'): string {\n  let s = normalizeScalar(val)\n  // Prevent CSV formula injection: prefix dangerous leading characters with a single quote\n  // so spreadsheet applications don't interpret them as formulas\n  if (s.length > 0 && /^[=+\\-@\\t\\r]/.test(s)) {\n    s = \"'\" + s\n  }\n  if (s.includes(delimiter) || s.includes('\"') || s.includes('\\n') || s.includes('\\r')) {\n    return '\"' + s.replace(/\"/g, '\"\"') + '\"'\n  }\n  return s\n}\n\nfunction rowsToObjects(meta: ColumnMeta[], data: unknown[][]): Record<string, unknown>[] {\n  const names = meta.map(c => c.name)\n  return data.map((row) => {\n    const obj: Record<string, unknown> = {}\n    for (let j = 0; j < names.length; j++) obj[names[j]] = row[j] ?? 
null\n    return obj\n  })\n}\n\n/** Generate RFC 4180 compliant CSV from query results */\nexport function generateCSV(meta: ColumnMeta[], data: unknown[][]): string {\n  const header = meta.map(c => escapeDelimited(c.name, ',')).join(',')\n  const rows = data.map(row => row.map(v => escapeDelimited(v, ',')).join(','))\n  return header + '\\n' + rows.join('\\n')\n}\n\n/** Generate TSV (TabSeparated) */\nexport function generateTSV(meta: ColumnMeta[], data: unknown[][]): string {\n  const header = meta.map(c => escapeDelimited(c.name, '\\t')).join('\\t')\n  const rows = data.map(row => row.map(v => escapeDelimited(v, '\\t')).join('\\t'))\n  return header + '\\n' + rows.join('\\n')\n}\n\n/** Generate JSONEachRow / JSONLines from query results */\nexport function generateJSONLines(meta: ColumnMeta[], data: unknown[][]): string {\n  return rowsToObjects(meta, data).map((obj) => JSON.stringify(obj)).join('\\n')\n}\n\n/** Generate JSON from query results (array of objects) */\nexport function generateJSON(meta: ColumnMeta[], data: unknown[][]): string {\n  return JSON.stringify(rowsToObjects(meta, data), null, 2)\n}\n\n/** Generate JSONCompact-like payload */\nexport function generateJSONCompact(meta: ColumnMeta[], data: unknown[][]): string {\n  return JSON.stringify({\n    meta,\n    data,\n    rows: data.length,\n  }, null, 2)\n}\n\n/** Generate markdown table */\nexport function generateMarkdown(meta: ColumnMeta[], data: unknown[][]): string {\n  if (!meta.length) return ''\n\n  const header = '| ' + meta.map((c) => c.name).join(' | ') + ' |'\n  const separator = '| ' + meta.map(() => '---').join(' | ') + ' |'\n  const rows = data.map((row) => {\n    const cells = row.map((v) => normalizeScalar(v).replace(/\\|/g, '\\\\|').replace(/\\n/g, '<br/>'))\n    return '| ' + cells.join(' | ') + ' |'\n  })\n  return [header, separator, ...rows].join('\\n')\n}\n\nfunction escapeSQLString(value: string): string {\n  return value.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, 
\"\\\\'\")\n}\n\n/** Generate INSERT INTO ... VALUES SQL */\nexport function generateSQLInsert(meta: ColumnMeta[], data: unknown[][], table = 'result_set'): string {\n  if (!meta.length) return ''\n  const columns = meta.map((c) => `\\`${c.name.replace(/`/g, '``')}\\``).join(', ')\n  const values = data.map((row) => {\n    const fields = row.map((v) => {\n      if (v === null || v === undefined) return 'NULL'\n      if (typeof v === 'number' && Number.isFinite(v)) return String(v)\n      if (typeof v === 'boolean') return v ? '1' : '0'\n      // Large integers are stored as strings by safeParse; emit them unquoted\n      if (typeof v === 'string' && /^-?\\d+$/.test(v)) return v\n      if (typeof v === 'object') return `'${escapeSQLString(JSON.stringify(v))}'`\n      return `'${escapeSQLString(String(v))}'`\n    })\n    return `(${fields.join(', ')})`\n  })\n  return `INSERT INTO \\`${table.replace(/`/g, '``')}\\` (${columns}) VALUES\\n${values.join(',\\n')};`\n}\n\n/** Generate lightweight XML export */\nexport function generateXML(meta: ColumnMeta[], data: unknown[][]): string {\n  const rows = rowsToObjects(meta, data)\n  const xmlTagName = (name: string): string => {\n    const cleaned = name.replace(/[^a-zA-Z0-9_.-]/g, '_')\n    return /^[a-zA-Z_]/.test(cleaned) ? cleaned : `c_${cleaned}`\n  }\n  const escapeXml = (s: string): string => s\n    .replace(/&/g, '&amp;')\n    .replace(/</g, '&lt;')\n    .replace(/>/g, '&gt;')\n    .replace(/\"/g, '&quot;')\n    .replace(/'/g, '&apos;')\n\n  const xmlRows = rows.map((row) => {\n    const cols = meta.map((col) => {\n      const value = row[col.name]\n      const content = value === null || value === undefined ? '' : escapeXml(typeof value === 'object' ? 
JSON.stringify(value) : String(value))\n      const tag = xmlTagName(col.name)\n      return `    <${tag}>${content}</${tag}>`\n    }).join('\\n')\n    return `  <row>\\n${cols}\\n  </row>`\n  }).join('\\n')\n\n  return `<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n<result>\\n${xmlRows}\\n</result>`\n}\n\n/** Copy text to clipboard */\nexport async function copyToClipboard(text: string): Promise<void> {\n  await navigator.clipboard.writeText(text)\n}\n\n/** Download content as a file */\nexport function downloadFile(content: string, filename: string, mimeType: string): void {\n  const blob = new Blob([content], { type: mimeType })\n  const url = URL.createObjectURL(blob)\n  const a = document.createElement('a')\n  a.href = url\n  a.download = filename\n  document.body.appendChild(a)\n  a.click()\n  document.body.removeChild(a)\n  URL.revokeObjectURL(url)\n}\n"
  },
  {
    "path": "ui/src/lib/utils/format.ts",
    "content": "/** Format a number with locale-aware separators */\nexport function formatNumber(n: number): string {\n  return n.toLocaleString()\n}\n\n/** Format bytes to human readable (KB, MB, GB) */\nexport function formatBytes(bytes: number): string {\n  if (bytes === 0) return '0 B'\n  const units = ['B', 'KB', 'MB', 'GB', 'TB']\n  // Clamp the unit index: fractional bytes (0 < bytes < 1) would produce a\n  // negative index, and values >= 1 PB would overflow past units[4].\n  const i = Math.min(units.length - 1, Math.max(0, Math.floor(Math.log(bytes) / Math.log(1024))))\n  const value = bytes / Math.pow(1024, i)\n  return `${value.toFixed(i === 0 ? 0 : 1)} ${units[i]}`\n}\n\n/** Format elapsed seconds to human readable */\nexport function formatElapsed(seconds: number): string {\n  if (seconds < 0.001) return '<1ms'\n  if (seconds < 1) return `${(seconds * 1000).toFixed(0)}ms`\n  if (seconds < 60) return `${seconds.toFixed(2)}s`\n  const mins = Math.floor(seconds / 60)\n  const secs = seconds % 60\n  return `${mins}m ${secs.toFixed(0)}s`\n}\n\n/** Format a duration in milliseconds */\nexport function formatDuration(ms: number): string {\n  return formatElapsed(ms / 1000)\n}\n"
  },
  {
    "path": "ui/src/lib/utils/grid-layout.ts",
    "content": "/** Dashboard grid layout utilities — pure functions, no framework dependency */\n\nexport const COLS = 12\nexport const ROW_H = 60\nexport const GAP = 16\nexport const MIN_W = 2\nexport const MIN_H = 2\n\nexport interface LayoutItem {\n  id: string\n  x: number\n  y: number\n  w: number\n  h: number\n}\n\n/** Convert grid coordinates to absolute pixel position */\nexport function gridToPixel(\n  item: { x: number; y: number; w: number; h: number },\n  colW: number,\n): { left: number; top: number; width: number; height: number } {\n  return {\n    left: item.x * (colW + GAP),\n    top: item.y * (ROW_H + GAP),\n    width: item.w * (colW + GAP) - GAP,\n    height: item.h * (ROW_H + GAP) - GAP,\n  }\n}\n\n/** Compute column width from container width */\nexport function calcColW(containerWidth: number): number {\n  return (containerWidth - GAP * (COLS - 1)) / COLS\n}\n\n/** AABB overlap test */\nexport function rectsOverlap(a: LayoutItem, b: LayoutItem): boolean {\n  if (a.id === b.id) return false\n  return (\n    a.x < b.x + b.w &&\n    a.x + a.w > b.x &&\n    a.y < b.y + b.h &&\n    a.y + a.h > b.y\n  )\n}\n\n/**\n * Compact layout: resolve collisions and apply gravity.\n * The moved panel (movedId) keeps its target position;\n * overlapping panels are pushed below it, then all panels\n * are pulled upward as far as possible.\n */\nexport function compact(items: LayoutItem[], movedId?: string): LayoutItem[] {\n  const result = items.map(i => ({ ...i }))\n  const moved = movedId ? 
result.find(i => i.id === movedId) : undefined\n\n  // Push panels that overlap the moved panel below it\n  if (moved) {\n    for (const item of result) {\n      if (item.id === movedId) continue\n      if (rectsOverlap(moved, item)) {\n        item.y = moved.y + moved.h\n      }\n    }\n    // Cascade: resolve secondary overlaps caused by pushing\n    const others = result.filter(i => i.id !== movedId).sort((a, b) => a.y - b.y || a.x - b.x)\n    for (let i = 0; i < others.length; i++) {\n      for (let j = i + 1; j < others.length; j++) {\n        if (rectsOverlap(others[i], others[j])) {\n          others[j].y = others[i].y + others[i].h\n        }\n      }\n    }\n  }\n\n  // Sort by y then x for gravity pass (moved item excluded from sorting priority)\n  const sorted = [...result].sort((a, b) => a.y - b.y || a.x - b.x)\n\n  // Gravity: pull each non-moved panel upward as far as possible\n  let changed = true\n  while (changed) {\n    changed = false\n    for (const item of sorted) {\n      if (item.id === movedId) continue\n      while (item.y > 0) {\n        const test = { ...item, y: item.y - 1 }\n        const collides = result.some(\n          o => o.id !== item.id && rectsOverlap(test, o),\n        )\n        if (!collides) {\n          item.y--\n          changed = true\n        } else {\n          break\n        }\n      }\n    }\n  }\n\n  return result\n}\n\n/** Compute the total grid height in pixels from the bottommost panel */\nexport function containerHeight(items: LayoutItem[]): number {\n  if (items.length === 0) return ROW_H\n  const maxBottom = Math.max(...items.map(i => i.y + i.h))\n  return maxBottom * (ROW_H + GAP)\n}\n"
  },
  {
    "path": "ui/src/lib/utils/lineage-layout.ts",
    "content": "import type { Node } from '@xyflow/svelte'\nimport type { LineageNode, LineageEdge } from '../types/governance'\n\nconst LAYER_GAP = 300\nconst NODE_GAP = 140\nconst NODE_WIDTH = 220\n\nexport interface LineageFlowNode extends Node {\n  data: {\n    database: string\n    table: string\n    nodeType: string\n    columns: LineageNode['columns']\n    linkedColumns: string[]\n  }\n}\n\n/**\n * Assigns left-to-right layered positions to lineage nodes using BFS.\n * Returns SvelteFlow-compatible Node objects with computed positions.\n */\nexport function layoutLineageGraph(\n  nodes: LineageNode[],\n  edges: LineageEdge[],\n): LineageFlowNode[] {\n  if (nodes.length === 0) return []\n\n  // Build adjacency: source → targets\n  const outgoing = new Map<string, string[]>()\n  const incoming = new Map<string, Set<string>>()\n\n  // Collect all linked columns per node from column_edges\n  const linkedColumnsMap = new Map<string, Set<string>>()\n\n  for (const node of nodes) {\n    outgoing.set(node.id, [])\n    incoming.set(node.id, new Set())\n    linkedColumnsMap.set(node.id, new Set())\n  }\n\n  for (const edge of edges) {\n    const srcKey = `${edge.source_database}.${edge.source_table}`\n    const tgtKey = `${edge.target_database}.${edge.target_table}`\n\n    outgoing.get(srcKey)?.push(tgtKey)\n    incoming.get(tgtKey)?.add(srcKey)\n\n    // Track linked columns\n    if (edge.column_edges) {\n      for (const ce of edge.column_edges) {\n        linkedColumnsMap.get(srcKey)?.add(ce.source_column)\n        linkedColumnsMap.get(tgtKey)?.add(ce.target_column)\n      }\n    }\n  }\n\n  // BFS layering from roots (nodes with no incoming edges)\n  const layers = new Map<string, number>()\n  const queue: string[] = []\n\n  for (const node of nodes) {\n    const inc = incoming.get(node.id)\n    if (!inc || inc.size === 0) {\n      layers.set(node.id, 0)\n      queue.push(node.id)\n    }\n  }\n\n  // If no roots found (cycle), assign all to layer 0\n  if 
(queue.length === 0) {\n    for (const node of nodes) {\n      layers.set(node.id, 0)\n      queue.push(node.id)\n    }\n  }\n\n  let head = 0\n  while (head < queue.length) {\n    const current = queue[head++]\n    const currentLayer = layers.get(current) ?? 0\n    for (const target of outgoing.get(current) ?? []) {\n      const existingLayer = layers.get(target)\n      if (existingLayer === undefined || existingLayer < currentLayer + 1) {\n        layers.set(target, currentLayer + 1)\n        queue.push(target)\n      }\n    }\n  }\n\n  // Group nodes by layer\n  const layerGroups = new Map<number, LineageNode[]>()\n  for (const node of nodes) {\n    const layer = layers.get(node.id) ?? 0\n    const group = layerGroups.get(layer) ?? []\n    group.push(node)\n    layerGroups.set(layer, group)\n  }\n\n  // Assign positions\n  const nodeMap = new Map(nodes.map((n) => [n.id, n]))\n  const flowNodes: LineageFlowNode[] = []\n\n  for (const [layer, group] of layerGroups) {\n    const x = layer * LAYER_GAP\n    const totalHeight = group.length * NODE_GAP\n    const startY = -totalHeight / 2\n\n    for (let i = 0; i < group.length; i++) {\n      const node = group[i]\n      const linked = linkedColumnsMap.get(node.id)\n\n      flowNodes.push({\n        id: node.id,\n        type: 'lineageTable',\n        position: { x, y: startY + i * NODE_GAP },\n        width: NODE_WIDTH,\n        data: {\n          database: node.database,\n          table: node.table,\n          nodeType: node.type,\n          columns: node.columns ?? [],\n          linkedColumns: linked ? [...linked] : [],\n        },\n      })\n    }\n  }\n\n  return flowNodes\n}\n"
  },
  {
    "path": "ui/src/lib/utils/safe-json.ts",
    "content": "/**\n * safe-json.ts\n *\n * Precision-safe JSON parsing for ClickHouse results.\n *\n * Problem: JavaScript's JSON.parse() converts all numbers to IEEE 754 Float64,\n * which only has ~15.9 significant digits. ClickHouse UInt64/Int64 values like\n * order IDs (e.g. 816687988383154176) are silently rounded to a phantom value\n * (816687988383154200), making them useless as identifiers.\n *\n * Solution: Intercept large integers before they lose precision:\n *   1. Primary: Use TC39 Stage 4 reviver `context.source` (native, zero-cost).\n *      Supported in Chrome 114+, Firefox 135+, Safari 18.4+ (~86% of users).\n *   2. Fallback: json-bigint with `storeAsString: true` for older browsers.\n *\n * Large integers are returned as strings. Consumer code must handle both\n * `number` (safe integers) and `string` (large integers) for numeric columns.\n */\n\nimport JSONbig from 'json-bigint'\n\n// Feature-detect TC39 reviver context.source support\n// The 3rd `context` parameter is TC39 Stage 4 but not yet in TypeScript's lib types.\nlet hasReviverSource = false\ntry {\n  const probe = (_key: string, _value: unknown, ctx: { source?: string }): unknown => {\n    if (typeof ctx?.source === 'string') hasReviverSource = true\n    return _value\n  }\n  JSON.parse('1', probe as (key: string, value: unknown) => unknown)\n} catch {\n  // Older environments may throw on the extra argument; fallback is used\n}\n\n// Lazy-initialised fallback parser (json-bigint allocates a parser object)\nlet _fallbackParser: ReturnType<typeof JSONbig> | null = null\nfunction getFallbackParser() {\n  if (!_fallbackParser) {\n    _fallbackParser = JSONbig({ storeAsString: true })\n  }\n  return _fallbackParser\n}\n\n/**\n * Parse a JSON string with precision-safe handling of large integers.\n *\n * Safe integers (|n| <= 2^53 - 1) are returned as `number`, exactly as\n * standard JSON.parse does. Large integers are returned as `string` to\n * preserve all digits. 
Everything else is unchanged.\n */\nexport function safeParse(text: string): any {\n  if (hasReviverSource) {\n    const reviver = (_key: string, value: unknown, ctx: { source: string }): unknown => {\n      // ctx.source is the raw token string from the original JSON text.\n      // If the parsed value rounded (i.e., it's a number that isn't a safe\n      // integer) and the raw source was a plain integer literal, keep the\n      // raw string so no precision is lost.\n      if (\n        typeof value === 'number' &&\n        !Number.isSafeInteger(value) &&\n        /^-?\\d+$/.test(ctx.source)\n      ) {\n        return ctx.source\n      }\n      return value\n    }\n    return JSON.parse(text, reviver as (key: string, value: unknown) => unknown)\n  }\n\n  // Fallback for browsers without reviver context.source support\n  return getFallbackParser().parse(text)\n}\n"
  },
  {
    "path": "ui/src/lib/utils/sql.ts",
    "content": "const WRITE_PATTERN = /^\\s*(INSERT|DELETE|UPDATE|CREATE|DROP|ALTER|TRUNCATE|RENAME|ATTACH|DETACH|OPTIMIZE|GRANT|REVOKE|KILL|SYSTEM|SET|USE)\\b/i\n\n/** Check if a query is a write (DDL/DML) operation */\nexport function isWriteQuery(query: string): boolean {\n  // Strip leading SQL comments\n  const stripped = query.replace(/^\\s*--.*$/gm, '').trim()\n  return WRITE_PATTERN.test(stripped)\n}\n"
  },
  {
    "path": "ui/src/lib/utils/stats.ts",
    "content": "import type { ColumnMeta } from '../types/query'\nimport { getDisplayType, type DisplayType } from './ch-types'\n\nexport interface ColumnStats {\n  name: string\n  type: string\n  displayType: DisplayType\n  count: number\n  nulls: number\n  nullPct: number\n  // numeric\n  min?: number\n  max?: number\n  avg?: number\n  sum?: number\n  // string\n  minLen?: number\n  maxLen?: number\n  avgLen?: number\n  distinct?: number\n  // date\n  earliest?: string\n  latest?: string\n}\n\nconst DISTINCT_SAMPLE = 10000\n\n/** Compute per-column statistics in a single pass */\nexport function computeColumnStats(meta: ColumnMeta[], data: unknown[][]): ColumnStats[] {\n  return meta.map((col, ci) => {\n    const dt = getDisplayType(col.type)\n    const total = data.length\n    let nulls = 0\n\n    if (dt === 'number') {\n      let min = Infinity\n      let max = -Infinity\n      let sum = 0\n      let numCount = 0\n\n      for (let r = 0; r < total; r++) {\n        const v = data[r][ci]\n        if (v === null || v === undefined || v === '') { nulls++; continue }\n        const n = Number(v)\n        if (Number.isNaN(n)) { nulls++; continue }\n        numCount++\n        sum += n\n        if (n < min) min = n\n        if (n > max) max = n\n      }\n\n      return {\n        name: col.name, type: col.type, displayType: dt,\n        count: total, nulls, nullPct: total > 0 ? (nulls / total) * 100 : 0,\n        min: numCount > 0 ? min : undefined,\n        max: numCount > 0 ? max : undefined,\n        avg: numCount > 0 ? sum / numCount : undefined,\n        sum: numCount > 0 ? 
sum : undefined,\n      }\n    }\n\n    if (dt === 'string') {\n      let minLen = Infinity\n      let maxLen = 0\n      let totalLen = 0\n      let strCount = 0\n      const seen = new Set<string>()\n      const sampleLimit = Math.min(total, DISTINCT_SAMPLE)\n\n      for (let r = 0; r < total; r++) {\n        const v = data[r][ci]\n        if (v === null || v === undefined) { nulls++; continue }\n        const s = String(v)\n        strCount++\n        totalLen += s.length\n        if (s.length < minLen) minLen = s.length\n        if (s.length > maxLen) maxLen = s.length\n        if (r < sampleLimit) seen.add(s)\n      }\n\n      return {\n        name: col.name, type: col.type, displayType: dt,\n        count: total, nulls, nullPct: total > 0 ? (nulls / total) * 100 : 0,\n        minLen: strCount > 0 ? minLen : undefined,\n        maxLen: strCount > 0 ? maxLen : undefined,\n        avgLen: strCount > 0 ? totalLen / strCount : undefined,\n        distinct: strCount > 0 ? seen.size : undefined,\n      }\n    }\n\n    if (dt === 'date') {\n      let earliest = ''\n      let latest = ''\n\n      for (let r = 0; r < total; r++) {\n        const v = data[r][ci]\n        if (v === null || v === undefined || v === '') { nulls++; continue }\n        const s = String(v)\n        if (!earliest || s < earliest) earliest = s\n        if (!latest || s > latest) latest = s\n      }\n\n      return {\n        name: col.name, type: col.type, displayType: dt,\n        count: total, nulls, nullPct: total > 0 ? (nulls / total) * 100 : 0,\n        earliest: earliest || undefined,\n        latest: latest || undefined,\n      }\n    }\n\n    // bool / json / unknown — just count + nulls\n    for (let r = 0; r < total; r++) {\n      const v = data[r][ci]\n      if (v === null || v === undefined) nulls++\n    }\n\n    return {\n      name: col.name, type: col.type, displayType: dt,\n      count: total, nulls, nullPct: total > 0 ? (nulls / total) * 100 : 0,\n    }\n  })\n}\n"
  },
  {
    "path": "ui/src/lib/utils/uuid.ts",
    "content": "function bytesToHex(bytes: Uint8Array): string {\n  return Array.from(bytes, (byte) => byte.toString(16).padStart(2, '0')).join('')\n}\n\nexport function createUUID(): string {\n  const cryptoObject = globalThis.crypto\n\n  if (cryptoObject?.randomUUID) {\n    return cryptoObject.randomUUID()\n  }\n\n  if (cryptoObject?.getRandomValues) {\n    const bytes = new Uint8Array(16)\n    cryptoObject.getRandomValues(bytes)\n    // RFC 4122 v4 bits.\n    bytes[6] = (bytes[6] & 0x0f) | 0x40\n    bytes[8] = (bytes[8] & 0x3f) | 0x80\n    const hex = bytesToHex(bytes)\n    return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`\n  }\n\n  // Last-resort fallback for very restricted environments.\n  const seed = `${Date.now()}-${Math.random()}-${Math.random()}`\n  let hash = 0\n  for (let i = 0; i < seed.length; i++) {\n    hash = ((hash << 5) - hash) + seed.charCodeAt(i)\n    hash |= 0\n  }\n  const base = Math.abs(hash).toString(16).padStart(8, '0')\n  return `${base.slice(0, 8)}-${base.slice(0, 4)}-4${base.slice(0, 3)}-a${base.slice(0, 3)}-${base}${base.slice(0, 4)}`\n}\n"
  },
  {
    "path": "ui/src/main.ts",
    "content": "import { mount } from 'svelte'\nimport './app.css'\nimport App from './App.svelte'\n\nconst app = mount(App, {\n  target: document.getElementById('app')!,\n})\n\nexport default app\n"
  },
  {
    "path": "ui/src/pages/Admin.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { AdminStats } from '../lib/types/api'\n  import { apiGet, apiPut, apiDel, apiPost } from '../lib/api/client'\n  import type { BrainModelOption, BrainProviderAdmin, BrainSkill } from '../lib/types/brain'\n  import {\n    adminBulkUpdateBrainModels,\n    adminCreateBrainProvider,\n    adminCreateBrainSkill,\n    adminDeleteBrainProvider,\n    adminListBrainModels,\n    adminListBrainProviders,\n    adminListBrainSkills,\n    adminSyncBrainProviderModels,\n    adminUpdateBrainModel,\n    adminUpdateBrainProvider,\n    adminUpdateBrainSkill,\n  } from '../lib/api/brain'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import Spinner from '../lib/components/common/Spinner.svelte'\n  import Combobox from '../lib/components/common/Combobox.svelte'\n  import type { ComboboxOption } from '../lib/components/common/Combobox.svelte'\n  import Sheet from '../lib/components/common/Sheet.svelte'\n  import HelpTip from '../lib/components/common/HelpTip.svelte'\n  import ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte'\n  import { Shield, RefreshCw, Users, Database, Activity, LogIn, ChevronDown, ChevronRight, Brain, UserPlus, KeyRound, Trash2, Plus, Copy, Telescope } from 'lucide-svelte'\n\n  // Tab state\n  type AdminTab = 'overview' | 'tunnels' | 'users' | 'brain' | 'langfuse'\n  const adminTabIds: AdminTab[] = ['overview', 'tunnels', 'users', 'brain', 'langfuse']\n  let activeTab = $state<AdminTab>('overview')\n\n  type TunnelConnection = {\n    id: string\n    name: string\n    status: string\n    online: boolean\n    created_at: string\n    last_seen?: string\n    host_info?: {\n      hostname?: string\n      os?: string\n    } | null\n  }\n  type TunnelTokenResponse = {\n    tunnel_token: string\n    setup_instructions?: {\n      connect?: string\n      service?: string\n    }\n    message?: string\n    connection?: {\n      
id?: string\n      name?: string\n    }\n  }\n\n  // Overview\n  let stats = $state<AdminStats | null>(null)\n  let connections = $state<any[]>([])\n  let statsLoading = $state(true)\n\n  // Tunnels\n  let tunnels = $state<TunnelConnection[]>([])\n  let tunnelsLoading = $state(false)\n  let tunnelCreateName = $state('')\n  let tunnelCreateLoading = $state(false)\n  let tunnelDeleteLoading = $state(false)\n  let tunnelDeleteConfirmOpen = $state(false)\n  let tunnelDeleteTarget = $state<TunnelConnection | null>(null)\n  let tunnelTokenPreview = $state<{\n    connectionId: string\n    connectionName: string\n    token: string\n    connectCmd: string\n    serviceCmd: string\n  } | null>(null)\n\n  // Users\n  let users = $state<any[]>([])\n  let usersSyncCheck = $state(false)\n  let userRoles = $state<Record<string, string>>({})\n  let chUsers = $state<any[]>([])\n  let usersLoading = $state(true)\n  let roleSavingUser = $state<string | null>(null)\n  let createCHUserSheetOpen = $state(false)\n  let editCHUserPasswordSheetOpen = $state(false)\n  let deleteCHUserConfirmOpen = $state(false)\n  let deleteCHUserLoading = $state(false)\n  let selectedCHUserName = $state('')\n  let createCHUserErrorMessage = $state('')\n  let createCHUserExecutedCommands = $state<string[]>([])\n  let createCHUserForm = $state({\n    name: '',\n    authType: 'sha256_password',\n    password: '',\n    defaultRoles: '',\n    ifNotExists: true,\n  })\n  let updateCHUserPasswordForm = $state({\n    authType: 'sha256_password',\n    password: '',\n    ifExists: true,\n  })\n\n  // Brain admin\n  let brainLoading = $state(false)\n  let brainProviders = $state<BrainProviderAdmin[]>([])\n  let brainModels = $state<BrainModelOption[]>([])\n  let brainSkills = $state<BrainSkill[]>([])\n  let modelProviderFilter = $state('')\n  let modelSearch = $state('')\n  let modelShowOnlyActive = $state(false)\n  let providerSheetOpen = $state(false)\n  let skillSheetOpen = $state(false)\n  let deletingProvider = 
$state<BrainProviderAdmin | null>(null)\n\n  // Langfuse\n  let langfuseLoading = $state(false)\n  let langfuseLoaded = $state(false)\n  let langfuseTesting = $state(false)\n  let langfuseSaving = $state(false)\n  let langfuseConfig = $state({\n    publicKey: '',\n    baseUrl: 'https://cloud.langfuse.com',\n    hasSecretKey: false,\n    enabled: false,\n  })\n  let langfuseSecretKey = $state('')\n\n  const roleOptions: ComboboxOption[] = [\n    { value: 'admin', label: 'admin' },\n    { value: 'analyst', label: 'analyst' },\n    { value: 'viewer', label: 'viewer' },\n  ]\n\n  const providerKindOptions: ComboboxOption[] = [\n    { value: 'openai', label: 'openai' },\n    { value: 'openai_compatible', label: 'openai_compatible' },\n    { value: 'ollama', label: 'ollama' },\n  ]\n  const providerBaseUrls: Record<string, string> = {\n    openai: 'https://api.openai.com/v1',\n    openai_compatible: '',\n    ollama: 'http://localhost:11434/v1',\n  }\n  const clickHouseAuthTypeOptions: ComboboxOption[] = [\n    { value: 'sha256_password', label: 'sha256_password' },\n    { value: 'plaintext_password', label: 'plaintext_password' },\n    { value: 'double_sha1_password', label: 'double_sha1_password' },\n    { value: 'no_password', label: 'no_password' },\n  ]\n  let providerForm = $state({\n    name: '',\n    kind: 'openai',\n    baseUrl: '',\n    apiKey: '',\n    isActive: true,\n    isDefault: false,\n  })\n  let skillForm = $state({\n    name: 'Default Brain Skill',\n    content: '',\n    isActive: true,\n    isDefault: true,\n  })\n\n  function normalizeAdminTab(value: string | null | undefined): AdminTab {\n    const raw = (value ?? 
'').trim().toLowerCase()\n    if ((adminTabIds as string[]).includes(raw)) return raw as AdminTab\n    return 'overview'\n  }\n\n  function syncAdminTabParam(tab: AdminTab) {\n    if (typeof window === 'undefined') return\n    const url = new URL(window.location.href)\n    if (url.searchParams.get('tab') === tab) return\n    url.searchParams.set('tab', tab)\n    history.replaceState(null, '', `${url.pathname}?${url.searchParams.toString()}`)\n  }\n\n  onMount(() => {\n    loadStats()\n    loadConnections()\n    const initialTab = normalizeAdminTab(\n      typeof window === 'undefined' ? null : new URLSearchParams(window.location.search).get('tab'),\n    )\n    switchTab(initialTab, true)\n  })\n\n  async function loadStats() {\n    statsLoading = true\n    try {\n      stats = await apiGet<AdminStats>('/api/admin/stats')\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      statsLoading = false\n    }\n  }\n\n  async function loadConnections() {\n    try {\n      connections = await apiGet<any[]>('/api/admin/connections')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function loadUsers() {\n    usersLoading = true\n    try {\n      const [usersResponse, roles] = await Promise.all([\n        apiGet<any>('/api/admin/users'),\n        apiGet<any[]>('/api/admin/user-roles'),\n      ])\n      if (Array.isArray(usersResponse)) {\n        users = usersResponse\n        usersSyncCheck = false\n      } else {\n        users = usersResponse?.users ?? []\n        usersSyncCheck = !!usersResponse?.sync?.clickhouse_user_check\n      }\n      const map: Record<string, string> = {}\n      for (const r of roles ?? 
[]) {\n        map[r.username] = r.role\n      }\n      userRoles = map\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      usersLoading = false\n    }\n  }\n\n  async function refreshUsersTab() {\n    await Promise.all([loadUsers(), loadClickHouseUsers()])\n  }\n\n  async function loadClickHouseUsers() {\n    try {\n      const res = await apiGet<{ data: any[]; meta: any[] }>('/api/admin/clickhouse-users')\n      chUsers = res.data ?? []\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function formatCHDefaultRoles(row: any): string {\n    if (row?.default_roles_all === 1 || row?.default_roles_all === true) return 'ALL'\n    const list = row?.default_roles_list\n    if (Array.isArray(list) && list.length > 0) return list.join(', ')\n    if (typeof list === 'string' && list.trim()) {\n      const trimmed = list.trim()\n      if (trimmed.startsWith('[') && trimmed.endsWith(']')) {\n        try {\n          const parsed = JSON.parse(trimmed)\n          if (Array.isArray(parsed) && parsed.length > 0) return parsed.join(', ')\n        } catch {\n          return trimmed\n        }\n      }\n      return trimmed\n    }\n    return '—'\n  }\n\n  function resetCreateCHUserForm() {\n    createCHUserErrorMessage = ''\n    createCHUserExecutedCommands = []\n    createCHUserForm = {\n      name: '',\n      authType: 'sha256_password',\n      password: '',\n      defaultRoles: '',\n      ifNotExists: true,\n    }\n  }\n\n  function openCreateCHUserSheet() {\n    resetCreateCHUserForm()\n    createCHUserSheetOpen = true\n  }\n\n  function openUpdateCHUserPasswordSheet(username: string, authType: string | null | undefined) {\n    selectedCHUserName = username\n    updateCHUserPasswordForm = {\n      authType: (authType || 'sha256_password').toLowerCase(),\n      password: '',\n      ifExists: true,\n    }\n    editCHUserPasswordSheetOpen = true\n  }\n\n  async function createClickHouseUser() {\n    createCHUserErrorMessage = ''\n    
if (!createCHUserForm.name.trim()) {\n      toastError('Username is required')\n      return\n    }\n    if (createCHUserForm.authType !== 'no_password' && !createCHUserForm.password.trim()) {\n      toastError('Password is required for selected auth type')\n      return\n    }\n\n    const defaultRoles = createCHUserForm.defaultRoles\n      .split(',')\n      .map(v => v.trim())\n      .filter(Boolean)\n\n    try {\n      const result = await apiPost<{ commands?: string[]; command?: string }>('/api/admin/clickhouse-users', {\n        name: createCHUserForm.name.trim(),\n        auth_type: createCHUserForm.authType,\n        password: createCHUserForm.password,\n        default_roles: defaultRoles,\n        if_not_exists: createCHUserForm.ifNotExists,\n      })\n      createCHUserExecutedCommands = result?.commands ?? (result?.command ? [result.command] : [])\n      toastSuccess(`ClickHouse user \"${createCHUserForm.name.trim()}\" created`)\n      createCHUserSheetOpen = false\n      resetCreateCHUserForm()\n      await refreshUsersTab()\n    } catch (e: any) {\n      createCHUserErrorMessage = e.message ?? 'Create user failed'\n      toastError('Create user failed. 
See details in the sheet.')\n    }\n  }\n\n  function escapeIdentifierForPreview(input: string): string {\n    return `\\`${input.replace(/`/g, '``')}\\``\n  }\n\n  function escapeLiteralForPreview(input: string): string {\n    return input.replace(/\\\\/g, '\\\\\\\\').replace(/'/g, \"\\\\'\")\n  }\n\n  function buildCreateCHUserCommandPreview(): string {\n    const user = createCHUserForm.name.trim() || 'new_user'\n    const auth = createCHUserForm.authType\n    const pass = createCHUserForm.password\n    const ifNotExists = createCHUserForm.ifNotExists\n    const roles = createCHUserForm.defaultRoles\n      .split(',')\n      .map((value) => value.trim())\n      .filter(Boolean)\n\n    const createParts: string[] = ['CREATE USER']\n    if (ifNotExists) createParts.push('IF NOT EXISTS')\n    createParts.push(escapeIdentifierForPreview(user))\n    if (auth === 'plaintext_password') {\n      createParts.push(`IDENTIFIED BY '${escapeLiteralForPreview(pass || 'password')}'`)\n    } else if (auth !== 'no_password') {\n      createParts.push(`IDENTIFIED WITH ${auth} BY '${escapeLiteralForPreview(pass || 'password')}'`)\n    }\n\n    const statements = [createParts.join(' ')]\n    if (roles.length > 0) {\n      const escapedRoles = roles.map((role) => escapeIdentifierForPreview(role)).join(', ')\n      statements.push(`GRANT ${escapedRoles} TO ${escapeIdentifierForPreview(user)}`)\n      statements.push(`ALTER USER ${escapeIdentifierForPreview(user)} DEFAULT ROLE ${escapedRoles}`)\n    }\n    return statements.join(';\\n') + ';'\n  }\n\n  async function updateClickHouseUserPassword() {\n    if (!selectedCHUserName) return\n    if (updateCHUserPasswordForm.authType !== 'no_password' && !updateCHUserPasswordForm.password.trim()) {\n      toastError('Password is required for selected auth type')\n      return\n    }\n\n    try {\n      await apiPut(`/api/admin/clickhouse-users/${encodeURIComponent(selectedCHUserName)}/password`, {\n        auth_type: 
updateCHUserPasswordForm.authType,\n        password: updateCHUserPasswordForm.password,\n        if_exists: updateCHUserPasswordForm.ifExists,\n      })\n      toastSuccess(`Password updated for \"${selectedCHUserName}\"`)\n      editCHUserPasswordSheetOpen = false\n      selectedCHUserName = ''\n      updateCHUserPasswordForm = {\n        authType: 'sha256_password',\n        password: '',\n        ifExists: true,\n      }\n      await loadClickHouseUsers()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function openDeleteCHUserConfirm(username: string) {\n    selectedCHUserName = username\n    deleteCHUserConfirmOpen = true\n  }\n\n  function cancelDeleteCHUser() {\n    deleteCHUserConfirmOpen = false\n    deleteCHUserLoading = false\n    selectedCHUserName = ''\n  }\n\n  async function confirmDeleteCHUser() {\n    if (!selectedCHUserName) return\n    deleteCHUserLoading = true\n    try {\n      await apiDel(`/api/admin/clickhouse-users/${encodeURIComponent(selectedCHUserName)}`)\n      toastSuccess(`ClickHouse user \"${selectedCHUserName}\" deleted`)\n      cancelDeleteCHUser()\n      await refreshUsersTab()\n    } catch (e: any) {\n      deleteCHUserLoading = false\n      toastError(e.message)\n    }\n  }\n\n  function setTunnelTokenPreview(connection: { id: string; name: string }, payload: TunnelTokenResponse) {\n    tunnelTokenPreview = {\n      connectionId: connection.id,\n      connectionName: connection.name,\n      token: payload.tunnel_token ?? '',\n      connectCmd: payload.setup_instructions?.connect ?? '',\n      serviceCmd: payload.setup_instructions?.service ?? 
'',\n    }\n  }\n\n  async function loadTunnels() {\n    tunnelsLoading = true\n    try {\n      tunnels = await apiGet<TunnelConnection[]>('/api/connections')\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      tunnelsLoading = false\n    }\n  }\n\n  async function createTunnel() {\n    const name = tunnelCreateName.trim()\n    if (!name) {\n      toastError('Tunnel name is required')\n      return\n    }\n    tunnelCreateLoading = true\n    try {\n      const res = await apiPost<TunnelTokenResponse>('/api/connections', { name })\n      toastSuccess(`Tunnel \"${name}\" created`)\n      tunnelCreateName = ''\n      await Promise.all([loadTunnels(), loadConnections(), loadStats()])\n      const createdConn = res.connection\n      if (createdConn?.id && createdConn?.name && res.tunnel_token) {\n        setTunnelTokenPreview({ id: createdConn.id, name: createdConn.name }, res)\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      tunnelCreateLoading = false\n    }\n  }\n\n  function requestDeleteTunnel(conn: TunnelConnection) {\n    tunnelDeleteTarget = conn\n    tunnelDeleteConfirmOpen = true\n  }\n\n  function cancelDeleteTunnel() {\n    tunnelDeleteConfirmOpen = false\n    tunnelDeleteLoading = false\n    tunnelDeleteTarget = null\n  }\n\n  async function confirmDeleteTunnel() {\n    if (!tunnelDeleteTarget) return\n    tunnelDeleteLoading = true\n    const target = tunnelDeleteTarget\n    try {\n      await apiDel(`/api/connections/${encodeURIComponent(target.id)}`)\n      toastSuccess(`Tunnel \"${target.name}\" deleted`)\n      if (tunnelTokenPreview?.connectionId === target.id) {\n        tunnelTokenPreview = null\n      }\n      cancelDeleteTunnel()\n      await Promise.all([loadTunnels(), loadConnections(), loadStats()])\n    } catch (e: any) {\n      tunnelDeleteLoading = false\n      toastError(e.message)\n    }\n  }\n\n  async function viewTunnelToken(conn: TunnelConnection) {\n    try {\n      const 
res = await apiGet<TunnelTokenResponse>(`/api/connections/${encodeURIComponent(conn.id)}/token`)\n      setTunnelTokenPreview(conn, res)\n      toastSuccess(`Token loaded for \"${conn.name}\"`)\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function regenerateTunnelToken(conn: TunnelConnection) {\n    try {\n      const res = await apiPost<TunnelTokenResponse>(`/api/connections/${encodeURIComponent(conn.id)}/regenerate-token`)\n      setTunnelTokenPreview(conn, res)\n      toastSuccess(res.message || `Token regenerated for \"${conn.name}\"`)\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function copyText(value: string, label: string) {\n    if (!value) {\n      toastError(`${label} is empty`)\n      return\n    }\n    try {\n      await navigator.clipboard.writeText(value)\n      toastSuccess(`${label} copied`)\n    } catch {\n      toastError('Clipboard unavailable')\n    }\n  }\n\n  function switchTab(tab: AdminTab, syncUrl = true) {\n    activeTab = tab\n    if (syncUrl) syncAdminTabParam(tab)\n    if (tab === 'tunnels' && !tunnelsLoading && tunnels.length === 0) {\n      loadTunnels()\n    }\n    if (tab === 'users' && users.length === 0) {\n      refreshUsersTab()\n    }\n    if (tab === 'brain' && !brainLoading && brainProviders.length === 0 && brainSkills.length === 0) {\n      loadBrainAdmin()\n    }\n    if (tab === 'langfuse' && !langfuseLoaded) {\n      loadLangfuseConfig()\n    }\n  }\n\n  async function loadBrainAdmin() {\n    brainLoading = true\n    try {\n      const [providers, models, skills] = await Promise.all([\n        adminListBrainProviders(),\n        adminListBrainModels(),\n        adminListBrainSkills(),\n      ])\n      brainProviders = providers\n      brainModels = models\n      brainSkills = skills\n      if (!modelProviderFilter && providers.length > 0) {\n        modelProviderFilter = providers[0].id\n      } else if (modelProviderFilter && !providers.some(p => p.id === 
modelProviderFilter)) {\n        modelProviderFilter = providers[0]?.id ?? ''\n      }\n      if (skills.length > 0 && !skillForm.content) {\n        const active = skills.find(s => s.is_active) ?? skills[0]\n        skillForm = {\n          name: active.name,\n          content: active.content,\n          isActive: active.is_active,\n          isDefault: active.is_default,\n        }\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      brainLoading = false\n    }\n  }\n\n\n  async function createProvider() {\n    try {\n      await adminCreateBrainProvider(providerForm)\n      toastSuccess('Brain provider created')\n      providerForm = { ...providerForm, name: '', apiKey: '', isDefault: false }\n      providerSheetOpen = false\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function toggleProvider(provider: BrainProviderAdmin, key: 'is_active' | 'is_default', value: boolean) {\n    try {\n      await adminUpdateBrainProvider(provider.id, {\n        isActive: key === 'is_active' ? value : provider.is_active,\n        isDefault: key === 'is_default' ? value : provider.is_default,\n      })\n      toastSuccess('Provider updated')\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function syncProviderModels(provider: BrainProviderAdmin) {\n    try {\n      await adminSyncBrainProviderModels(provider.id)\n      toastSuccess(`Synced models for ${provider.name}. 
Recommended model auto-selected.`)\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function deleteProvider(provider: BrainProviderAdmin) {\n    deletingProvider = provider\n  }\n\n  async function confirmDeleteProvider() {\n    if (!deletingProvider) return\n    const provider = deletingProvider\n    deletingProvider = null\n    try {\n      await adminDeleteBrainProvider(provider.id)\n      toastSuccess('Provider deleted')\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function updateModel(model: BrainModelOption, key: 'is_active' | 'is_default', value: boolean) {\n    try {\n      await adminUpdateBrainModel(model.id, {\n        displayName: model.display_name || model.name,\n        isActive: key === 'is_active' ? value : model.is_active,\n        isDefault: key === 'is_default' ? value : model.is_default,\n      })\n      toastSuccess('Model updated')\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function filteredBrainModels(): BrainModelOption[] {\n    const providerID = modelProviderFilter.trim()\n    const term = modelSearch.trim().toLowerCase()\n    return brainModels.filter(model => {\n      if (providerID && model.provider_id !== providerID) return false\n      if (modelShowOnlyActive && !model.is_active) return false\n      if (!term) return true\n      const candidate = `${model.display_name || ''} ${model.name} ${model.provider_name}`.toLowerCase()\n      return candidate.includes(term)\n    })\n  }\n\n  function modelsForProvider(providerId: string): BrainModelOption[] {\n    const term = modelSearch.trim().toLowerCase()\n    return brainModels.filter(model => {\n      if (model.provider_id !== providerId) return false\n      if (modelShowOnlyActive && !model.is_active) return false\n      if (!term) return true\n      const candidate = `${model.display_name || ''} ${model.name} 
${model.provider_name}`.toLowerCase()\n      return candidate.includes(term)\n    })\n  }\n\n  function providerFilterOptions(): ComboboxOption[] {\n    return [\n      { value: '', label: 'All providers' },\n      ...brainProviders.map(provider => ({ value: provider.id, label: provider.name, hint: provider.kind })),\n    ]\n  }\n\n  function visibleProvidersForModels(): BrainProviderAdmin[] {\n    if (!modelProviderFilter) return brainProviders\n    return brainProviders.filter(p => p.id === modelProviderFilter)\n  }\n\n  async function runModelBulkAction(action: 'deactivate_all' | 'activate_all' | 'activate_recommended') {\n    if (!modelProviderFilter) {\n      toastError('Select a provider first')\n      return\n    }\n    try {\n      await adminBulkUpdateBrainModels({ providerId: modelProviderFilter, action })\n      if (action === 'deactivate_all') toastSuccess('All models deactivated for selected provider')\n      if (action === 'activate_all') toastSuccess('All models activated for selected provider')\n      if (action === 'activate_recommended') toastSuccess('Recommended model activated and set as default')\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function saveSkill() {\n    try {\n      if (!skillForm.content.trim() || !skillForm.name.trim()) {\n        toastError('Skill name and content are required')\n        return\n      }\n      const active = brainSkills.find(s => s.is_active)\n      if (active) {\n        await adminUpdateBrainSkill(active.id, {\n          name: skillForm.name,\n          content: skillForm.content,\n          isActive: skillForm.isActive,\n          isDefault: skillForm.isDefault,\n        })\n      } else {\n        await adminCreateBrainSkill({\n          name: skillForm.name,\n          content: skillForm.content,\n          isActive: skillForm.isActive,\n          isDefault: skillForm.isDefault,\n        })\n      }\n      toastSuccess('Brain skill saved')\n     
 skillSheetOpen = false\n      await loadBrainAdmin()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function openSkillSheet() {\n    const active = brainSkills.find(s => s.is_active) ?? brainSkills[0]\n    if (active) {\n      skillForm = {\n        name: active.name,\n        content: active.content,\n        isActive: active.is_active,\n        isDefault: active.is_default,\n      }\n    }\n    skillSheetOpen = true\n  }\n\n  async function setRole(username: string, role: string) {\n    if (!username || roleSavingUser === username) return\n    roleSavingUser = username\n    try {\n      await apiPut(`/api/admin/user-roles/${encodeURIComponent(username)}`, { role })\n      userRoles = { ...userRoles, [username]: role }\n      toastSuccess(`Role set to ${role} for ${username}`)\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      if (roleSavingUser === username) roleSavingUser = null\n    }\n  }\n\n  async function removeRole(username: string) {\n    try {\n      await apiDel(`/api/admin/user-roles/${encodeURIComponent(username)}`)\n      const { [username]: _, ...rest } = userRoles\n      userRoles = rest\n      toastSuccess(`Role override removed for ${username}`)\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  // ── Langfuse ──────────────────────────────────────────────────────────\n\n  async function loadLangfuseConfig() {\n    langfuseLoading = true\n    try {\n      const res = await apiGet<{\n        public_key: string\n        base_url: string\n        has_secret_key: boolean\n        enabled: boolean\n      }>('/api/admin/langfuse')\n      langfuseConfig = {\n        publicKey: res.public_key ?? '',\n        baseUrl: res.base_url || 'https://cloud.langfuse.com',\n        hasSecretKey: res.has_secret_key ?? false,\n        enabled: res.enabled ?? false,\n      }\n      langfuseSecretKey = ''\n      langfuseLoaded = true\n    } catch (e: unknown) {\n      toastError(e instanceof Error ? 
e.message : 'Failed to load Langfuse config')\n    } finally {\n      langfuseLoading = false\n    }\n  }\n\n  async function saveLangfuseConfig() {\n    if (!langfuseConfig.publicKey.trim()) {\n      toastError('Public key is required')\n      return\n    }\n    if (!langfuseConfig.hasSecretKey && !langfuseSecretKey.trim()) {\n      toastError('Secret key is required')\n      return\n    }\n    langfuseSaving = true\n    try {\n      const payload: Record<string, string> = {\n        publicKey: langfuseConfig.publicKey.trim(),\n        baseUrl: langfuseConfig.baseUrl.trim() || 'https://cloud.langfuse.com',\n      }\n      if (langfuseSecretKey.trim()) {\n        payload.secretKey = langfuseSecretKey.trim()\n      }\n      const res = await apiPut<{ enabled: boolean }>('/api/admin/langfuse', payload)\n      langfuseConfig.enabled = res.enabled\n      langfuseConfig.hasSecretKey = true\n      langfuseSecretKey = ''\n      toastSuccess('Langfuse configuration saved')\n    } catch (e: unknown) {\n      toastError(e instanceof Error ? e.message : 'Failed to save')\n    } finally {\n      langfuseSaving = false\n    }\n  }\n\n  async function deleteLangfuseConfig() {\n    try {\n      await apiDel('/api/admin/langfuse')\n      langfuseConfig = {\n        publicKey: '',\n        baseUrl: 'https://cloud.langfuse.com',\n        hasSecretKey: false,\n        enabled: false,\n      }\n      langfuseSecretKey = ''\n      toastSuccess('Langfuse configuration removed')\n    } catch (e: unknown) {\n      toastError(e instanceof Error ? 
e.message : 'Failed to remove')\n    }\n  }\n\n  async function testLangfuseConnection() {\n    langfuseTesting = true\n    try {\n      const payload: Record<string, string> = {\n        publicKey: langfuseConfig.publicKey.trim(),\n        baseUrl: langfuseConfig.baseUrl.trim() || 'https://cloud.langfuse.com',\n      }\n      if (langfuseSecretKey.trim()) {\n        payload.secretKey = langfuseSecretKey.trim()\n      }\n      const res = await apiPost<{ connected: boolean; error?: string }>('/api/admin/langfuse/test', payload)\n      if (res.connected) {\n        toastSuccess('Connection successful')\n      } else {\n        toastError(`Connection failed: ${res.error ?? 'unknown error'}`)\n      }\n    } catch (e: unknown) {\n      toastError(e instanceof Error ? e.message : 'Connection test failed')\n    } finally {\n      langfuseTesting = false\n    }\n  }\n\n  function formatTime(ts: string): string {\n    try {\n      return new Date(ts).toLocaleString()\n    } catch {\n      return ts\n    }\n  }\n\n  function truncate(s: string, max = 80): string {\n    return s.length > max ? s.slice(0, max) + '...' : s\n  }\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <div class=\"border-b border-gray-200 dark:border-gray-800\">\n    <div class=\"flex flex-col gap-2 px-4 py-3 md:flex-row md:items-center md:gap-4\">\n      <div class=\"flex items-center gap-3\">\n        <Shield size={18} class=\"text-ch-blue\" />\n        <h1 class=\"ds-page-title\">Admin Panel</h1>\n      </div>\n      <nav class=\"ds-tabs border-0 px-0 pt-0 gap-1 overflow-x-auto whitespace-nowrap\" aria-label=\"Admin Tabs\">\n        {#each [['overview', 'Overview'], ['tunnels', 'Tunnels'], ['users', 'Users'], ['brain', 'Brain'], ['langfuse', 'Langfuse']] as [key, label]}\n          <button\n            class=\"ds-tab {activeTab === key ? 
'ds-tab-active' : ''}\"\n            onclick={() => switchTab(key as AdminTab)}\n          >\n            {label}\n          </button>\n        {/each}\n      </nav>\n</div>\n</div>\n\n<Sheet\n  open={providerSheetOpen}\n  title=\"Create Brain Provider\"\n  size=\"lg\"\n  onclose={() => providerSheetOpen = false}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault()\n      void createProvider()\n    }}\n  >\n    <div class=\"flex items-center gap-2\">\n      <p class=\"text-xs text-gray-500\">Provider controls which model catalog is available to all users.</p>\n      <HelpTip text=\"OpenAI works with managed API keys. OpenAI-compatible is for custom gateways. Ollama usually uses local/base URL endpoints.\" />\n    </div>\n\n    <div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n      <label class=\"space-y-1\">\n        <span class=\"text-xs text-gray-500\">Provider Name</span>\n        <input class=\"ds-input-sm\" placeholder=\"OpenAI Prod\" bind:value={providerForm.name} required />\n      </label>\n      <label class=\"space-y-1\">\n        <span class=\"text-xs text-gray-500\">Provider Kind</span>\n        <Combobox\n          options={providerKindOptions}\n          value={providerForm.kind}\n          onChange={(v) => providerForm = { ...providerForm, kind: v, baseUrl: providerBaseUrls[v] ?? '' }}\n        />\n      </label>\n      <label class=\"space-y-1 md:col-span-2\">\n        <span class=\"text-xs text-gray-500\">Base URL</span>\n        <input\n          class=\"ds-input-sm\"\n          placeholder={providerForm.kind === 'ollama'\n            ? 'http://localhost:11434/v1'\n            : providerForm.kind === 'openai_compatible'\n              ? 
'https://your-gateway.example.com'\n              : 'https://api.openai.com/v1'}\n          bind:value={providerForm.baseUrl}\n        />\n      </label>\n      <label class=\"space-y-1 md:col-span-2\">\n        <span class=\"text-xs text-gray-500\">API Key</span>\n        <input class=\"ds-input-sm\" type=\"password\" placeholder=\"sk-...\" bind:value={providerForm.apiKey} />\n      </label>\n    </div>\n\n    <div class=\"flex flex-wrap items-center gap-4\">\n      <label class=\"ds-checkbox-label text-xs\">\n        <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={providerForm.isActive} />\n        Active\n      </label>\n      <label class=\"ds-checkbox-label text-xs\">\n        <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={providerForm.isDefault} />\n        Default provider\n      </label>\n    </div>\n\n    <div class=\"flex items-center justify-end gap-2 pt-2 border-t border-gray-200 dark:border-gray-800\">\n      <button type=\"button\" class=\"ds-btn-outline\" onclick={() => providerSheetOpen = false}>Cancel</button>\n      <button type=\"submit\" class=\"ds-btn-primary\" disabled={!providerForm.name.trim()}>Create Provider</button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={skillSheetOpen}\n  title=\"Global Brain Skill\"\n  size=\"xl\"\n  onclose={() => skillSheetOpen = false}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault()\n      void saveSkill()\n    }}\n  >\n    <div class=\"flex items-center gap-2\">\n      <p class=\"text-xs text-gray-500\">This prompt steers SQL safety, artifact usage, and tool behavior for every chat.</p>\n      <HelpTip text=\"Keep this instruction set practical: SQL guardrails, artifact expectations, and when to ask clarifying questions.\" />\n    </div>\n\n    <label class=\"space-y-1\">\n      <span class=\"text-xs text-gray-500\">Skill Name</span>\n      <input class=\"ds-input-sm\" bind:value={skillForm.name} required />\n    </label>\n\n    <label 
class=\"space-y-1\">\n      <span class=\"text-xs text-gray-500\">Skill Content</span>\n      <textarea\n        class=\"ds-input-sm min-h-[58vh] font-mono text-[12px] leading-relaxed resize-y\"\n        bind:value={skillForm.content}\n        placeholder=\"You are Brain, a senior ClickHouse copilot...\"\n      ></textarea>\n    </label>\n\n    <div class=\"flex flex-wrap items-center gap-4\">\n      <label class=\"ds-checkbox-label text-xs\">\n        <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={skillForm.isActive} />\n        Active\n      </label>\n      <label class=\"ds-checkbox-label text-xs\">\n        <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={skillForm.isDefault} />\n        Default\n      </label>\n    </div>\n\n    <div class=\"flex items-center justify-end gap-2 pt-2 border-t border-gray-200 dark:border-gray-800\">\n      <button type=\"button\" class=\"ds-btn-outline\" onclick={() => skillSheetOpen = false}>Cancel</button>\n      <button type=\"submit\" class=\"ds-btn-primary\" disabled={!skillForm.name.trim() || !skillForm.content.trim()}>Save Skill</button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={createCHUserSheetOpen}\n  title=\"Create ClickHouse User\"\n  size=\"lg\"\n  onclose={() => createCHUserSheetOpen = false}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault()\n      void createClickHouseUser()\n    }}\n  >\n    <div class=\"flex items-center gap-2\">\n      <p class=\"text-xs text-gray-500\">Create users directly in ClickHouse for the active connection.</p>\n      <HelpTip text=\"Use strong passwords. 
The selected auth type controls how ClickHouse stores and verifies credentials.\" />\n    </div>\n\n    <div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n      <label class=\"space-y-1\">\n        <span class=\"text-xs text-gray-500\">Username</span>\n        <input class=\"ds-input-sm\" placeholder=\"analytics_reader\" bind:value={createCHUserForm.name} required />\n      </label>\n      <label class=\"space-y-1\">\n        <span class=\"text-xs text-gray-500\">Auth Type</span>\n        <Combobox\n          options={clickHouseAuthTypeOptions}\n          value={createCHUserForm.authType}\n          onChange={(v) => createCHUserForm = { ...createCHUserForm, authType: v }}\n        />\n      </label>\n      <label class=\"space-y-1 md:col-span-2\">\n        <span class=\"text-xs text-gray-500\">Password</span>\n        <input\n          class=\"ds-input-sm\"\n          type=\"password\"\n          placeholder={createCHUserForm.authType === 'no_password' ? 'Not required for no_password' : 'Enter password'}\n          bind:value={createCHUserForm.password}\n          disabled={createCHUserForm.authType === 'no_password'}\n        />\n      </label>\n      <label class=\"space-y-1 md:col-span-2\">\n        <span class=\"text-xs text-gray-500\">Default Roles (optional)</span>\n        <input class=\"ds-input-sm\" placeholder=\"role_reader, role_writer\" bind:value={createCHUserForm.defaultRoles} />\n      </label>\n    </div>\n\n    <label class=\"ds-checkbox-label text-xs\">\n      <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={createCHUserForm.ifNotExists} />\n      Use IF NOT EXISTS\n    </label>\n\n    <div class=\"ds-panel-muted p-3\">\n      <p class=\"text-xs font-semibold text-gray-700 dark:text-gray-200 mb-1\">Command Preview</p>\n      <pre class=\"text-[11px] max-h-36 overflow-auto whitespace-pre-wrap break-all text-gray-600 dark:text-gray-300\">{buildCreateCHUserCommandPreview()}</pre>\n    </div>\n\n    {#if createCHUserErrorMessage}\n     
 <div class=\"rounded-lg border border-red-500/40 bg-red-500/10 p-3\">\n        <p class=\"text-xs font-semibold text-red-200 mb-1\">Create User Error</p>\n        <pre class=\"text-[11px] whitespace-pre-wrap break-words max-h-36 overflow-auto text-red-100\">{createCHUserErrorMessage}</pre>\n      </div>\n    {/if}\n\n    {#if createCHUserExecutedCommands.length > 0}\n      <div class=\"ds-panel p-3 space-y-2\">\n        <p class=\"text-xs font-semibold text-gray-700 dark:text-gray-200\">Executed Commands</p>\n        {#each createCHUserExecutedCommands as sql}\n          <pre class=\"text-[11px] max-h-24 overflow-auto whitespace-pre-wrap break-all text-gray-600 dark:text-gray-300\">{sql}</pre>\n        {/each}\n      </div>\n    {/if}\n\n    <div class=\"flex items-center justify-end gap-2 pt-2 border-t border-gray-200 dark:border-gray-800\">\n      <button type=\"button\" class=\"ds-btn-outline\" onclick={() => createCHUserSheetOpen = false}>Cancel</button>\n      <button type=\"submit\" class=\"ds-btn-primary\" disabled={!createCHUserForm.name.trim()}>Create User</button>\n    </div>\n  </form>\n</Sheet>\n\n<Sheet\n  open={editCHUserPasswordSheetOpen}\n  title=\"Change ClickHouse User Password\"\n  size=\"md\"\n  onclose={() => editCHUserPasswordSheetOpen = false}\n>\n  <form\n    class=\"space-y-4\"\n    onsubmit={(e) => {\n      e.preventDefault()\n      void updateClickHouseUserPassword()\n    }}\n  >\n    <div class=\"ds-panel-muted p-3\">\n      <p class=\"text-xs text-gray-500\">User</p>\n      <p class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">{selectedCHUserName || '—'}</p>\n    </div>\n\n    <label class=\"space-y-1\">\n      <span class=\"text-xs text-gray-500\">Auth Type</span>\n      <Combobox\n        options={clickHouseAuthTypeOptions}\n        value={updateCHUserPasswordForm.authType}\n        onChange={(v) => updateCHUserPasswordForm = { ...updateCHUserPasswordForm, authType: v }}\n      />\n    </label>\n\n    <label 
class=\"space-y-1\">\n      <span class=\"text-xs text-gray-500\">New Password</span>\n      <input\n        class=\"ds-input-sm\"\n        type=\"password\"\n        placeholder={updateCHUserPasswordForm.authType === 'no_password' ? 'Not required for no_password' : 'Enter new password'}\n        bind:value={updateCHUserPasswordForm.password}\n        disabled={updateCHUserPasswordForm.authType === 'no_password'}\n      />\n    </label>\n\n    <label class=\"ds-checkbox-label text-xs\">\n      <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={updateCHUserPasswordForm.ifExists} />\n      Use IF EXISTS\n    </label>\n\n    <div class=\"flex items-center justify-end gap-2 pt-2 border-t border-gray-200 dark:border-gray-800\">\n      <button type=\"button\" class=\"ds-btn-outline\" onclick={() => editCHUserPasswordSheetOpen = false}>Cancel</button>\n      <button type=\"submit\" class=\"ds-btn-primary\" disabled={!selectedCHUserName}>Save Password</button>\n    </div>\n  </form>\n</Sheet>\n\n<ConfirmDialog\n  open={deleteCHUserConfirmOpen}\n  title=\"Delete ClickHouse user?\"\n  description={selectedCHUserName ? `Delete \"${selectedCHUserName}\" from ClickHouse? This cannot be undone.` : 'This action cannot be undone.'}\n  confirmLabel=\"Delete User\"\n  destructive={true}\n  loading={deleteCHUserLoading}\n  onconfirm={confirmDeleteCHUser}\n  oncancel={cancelDeleteCHUser}\n/>\n\n<ConfirmDialog\n  open={tunnelDeleteConfirmOpen}\n  title=\"Delete tunnel?\"\n  description={tunnelDeleteTarget ? `Delete \"${tunnelDeleteTarget.name}\"? This disconnects the agent and cannot be undone.` : 'This action cannot be undone.'}\n  confirmLabel=\"Delete Tunnel\"\n  destructive={true}\n  loading={tunnelDeleteLoading}\n  onconfirm={confirmDeleteTunnel}\n  oncancel={cancelDeleteTunnel}\n/>\n\n<ConfirmDialog\n  open={deletingProvider !== null}\n  title=\"Delete provider?\"\n  description={deletingProvider ? `Delete \"${deletingProvider.name}\" and all its models? 
This cannot be undone.` : ''}\n  confirmLabel=\"Delete Provider\"\n  destructive\n  onconfirm={confirmDeleteProvider}\n  oncancel={() => deletingProvider = null}\n/>\n\n  <!-- Content -->\n  <div class=\"flex-1 overflow-auto p-4\">\n    {#if activeTab === 'overview'}\n      <!-- Stats cards -->\n      {#if statsLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else if stats}\n\t        <div class=\"grid grid-cols-2 md:grid-cols-4 gap-4 mb-6\">\n\t          <div class=\"ds-stat-card\">\n\t            <div class=\"flex items-center gap-2 text-gray-500 text-xs mb-1\"><Users size={14} /> Users</div>\n\t            <div class=\"text-2xl font-bold text-gray-900 dark:text-gray-100\">{stats.users_count}</div>\n\t          </div>\n\t          <div class=\"ds-stat-card\">\n\t            <div class=\"flex items-center gap-2 text-gray-500 text-xs mb-1\"><Database size={14} /> Connections</div>\n\t            <div class=\"text-2xl font-bold text-gray-900 dark:text-gray-100\">{stats.online} <span class=\"text-sm font-normal text-gray-500\">/ {stats.connections}</span></div>\n\t          </div>\n\t          <div class=\"ds-stat-card\">\n\t            <div class=\"flex items-center gap-2 text-gray-500 text-xs mb-1\"><Activity size={14} /> Queries</div>\n\t            <div class=\"text-2xl font-bold text-gray-900 dark:text-gray-100\">{stats.query_count}</div>\n\t          </div>\n\t          <div class=\"ds-stat-card\">\n\t            <div class=\"flex items-center gap-2 text-gray-500 text-xs mb-1\"><LogIn size={14} /> Logins</div>\n\t            <div class=\"text-2xl font-bold text-gray-900 dark:text-gray-100\">{stats.login_count}</div>\n\t          </div>\n\t        </div>\n      {/if}\n\n      <!-- Connections -->\n      <h2 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300 mb-2\">Connections</h2>\n      {#if connections.length === 0}\n        <p class=\"text-sm text-gray-500\">No connections found</p>\n      
{:else}\n\t        <div class=\"flex flex-col gap-2\">\n\t          {#each connections as conn}\n\t            <div class=\"ds-panel flex items-center gap-3 p-3\">\n\t              <span class=\"w-2 h-2 rounded-full {conn.online ? 'bg-green-500' : 'bg-gray-400'}\"></span>\n\t              <span class=\"text-sm font-medium text-gray-800 dark:text-gray-200\">{conn.name}</span>\n\t              <span class=\"text-xs text-gray-500\">{conn.id}</span>\n              <span class=\"ml-auto text-xs {conn.online ? 'text-green-600' : 'text-gray-400'}\">{conn.online ? 'Online' : 'Offline'}</span>\n            </div>\n          {/each}\n        </div>\n      {/if}\n\n    {:else if activeTab === 'tunnels'}\n      <div class=\"flex flex-wrap items-end gap-2 mb-3\">\n        <label class=\"space-y-1\">\n          <span class=\"text-xs text-gray-500\">Tunnel Name</span>\n          <input\n            class=\"ds-input-sm w-72\"\n            placeholder=\"warehouse-prod\"\n            bind:value={tunnelCreateName}\n            onkeydown={(e) => e.key === 'Enter' && createTunnel()}\n          />\n        </label>\n        <button\n          class=\"ds-btn-primary\"\n          onclick={() => createTunnel()}\n          disabled={tunnelCreateLoading || !tunnelCreateName.trim()}\n        >\n          <Plus size={14} />\n          {tunnelCreateLoading ? 'Creating...' 
: 'Create Tunnel'}\n        </button>\n        <button class=\"ds-btn-ghost\" onclick={() => loadTunnels()} title=\"Refresh tunnels\">\n          <RefreshCw size={14} />\n        </button>\n      </div>\n\n      {#if tunnelTokenPreview}\n        {@const preview = tunnelTokenPreview}\n        <div class=\"ds-panel p-3 mb-3 space-y-2\">\n          <div class=\"flex flex-wrap items-center gap-2\">\n            <p class=\"text-xs font-semibold text-gray-700 dark:text-gray-200\">Latest Token: {preview.connectionName}</p>\n            <button class=\"ds-btn-outline px-2.5 py-1.5\" onclick={() => copyText(preview.token, 'Tunnel token')}>\n              <Copy size={12} />\n              Copy token\n            </button>\n          </div>\n          <pre class=\"text-[11px] p-2 rounded border border-gray-200 dark:border-gray-800 bg-gray-100 dark:bg-gray-900 overflow-x-auto\">{preview.token}</pre>\n          {#if preview.connectCmd}\n            <div>\n              <p class=\"text-[11px] text-gray-500 mb-1\">Connect command</p>\n              <div class=\"flex gap-2\">\n                <pre class=\"flex-1 text-[11px] p-2 rounded border border-gray-200 dark:border-gray-800 bg-gray-100 dark:bg-gray-900 overflow-x-auto\">{preview.connectCmd}</pre>\n                <button class=\"ds-btn-outline px-2.5 py-1.5\" onclick={() => copyText(preview.connectCmd, 'Connect command')}>\n                  <Copy size={12} />\n                </button>\n              </div>\n            </div>\n          {/if}\n          {#if preview.serviceCmd}\n            <div>\n              <p class=\"text-[11px] text-gray-500 mb-1\">Service command</p>\n              <div class=\"flex gap-2\">\n                <pre class=\"flex-1 text-[11px] p-2 rounded border border-gray-200 dark:border-gray-800 bg-gray-100 dark:bg-gray-900 overflow-x-auto\">{preview.serviceCmd}</pre>\n                <button class=\"ds-btn-outline px-2.5 py-1.5\" onclick={() => copyText(preview.serviceCmd, 'Service command')}>\n      
            <Copy size={12} />\n                </button>\n              </div>\n            </div>\n          {/if}\n        </div>\n      {/if}\n\n      {#if tunnelsLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else if tunnels.length === 0}\n        <p class=\"text-sm text-gray-500\">No tunnels configured</p>\n      {:else}\n        <div class=\"ds-table-wrap\">\n          <table class=\"ds-table\">\n            <thead>\n              <tr class=\"ds-table-head-row\">\n                <th class=\"ds-table-th\">Name</th>\n                <th class=\"ds-table-th\">ID</th>\n                <th class=\"ds-table-th\">Status</th>\n                <th class=\"ds-table-th\">Host</th>\n                <th class=\"ds-table-th\">Created</th>\n                <th class=\"ds-table-th-right\">Actions</th>\n              </tr>\n            </thead>\n            <tbody>\n              {#each tunnels as conn}\n                <tr class=\"ds-table-row\">\n                  <td class=\"ds-td-strong\">{conn.name}</td>\n                  <td class=\"ds-td-mono\">{conn.id}</td>\n                  <td class=\"ds-td\">\n                    <span class=\"inline-flex items-center gap-2\">\n                      <span class=\"w-2 h-2 rounded-full {conn.online ? 'bg-green-500' : 'bg-gray-400'}\"></span>\n                      <span class={conn.online ? 'text-green-600' : 'text-gray-500'}>{conn.online ? 
'Online' : 'Offline'}</span>\n                    </span>\n                  </td>\n                  <td class=\"ds-td-mono\">{conn.host_info?.hostname || '—'}</td>\n                  <td class=\"ds-td-mono whitespace-nowrap\">{formatTime(conn.created_at)}</td>\n                  <td class=\"ds-td-right\">\n                    <div class=\"flex justify-end gap-2\">\n                      <button class=\"ds-btn-outline px-2.5 py-1.5\" onclick={() => viewTunnelToken(conn)}>Token</button>\n                      <button class=\"ds-btn-outline px-2.5 py-1.5\" onclick={() => regenerateTunnelToken(conn)}>Regenerate</button>\n                      <button\n                        class=\"ds-btn-outline px-2.5 py-1.5 border-red-300/80 text-red-600 hover:text-red-700 hover:border-red-500\"\n                        onclick={() => requestDeleteTunnel(conn)}\n                      >\n                        <Trash2 size={12} />\n                        Delete\n                      </button>\n                    </div>\n                  </td>\n                </tr>\n              {/each}\n            </tbody>\n          </table>\n        </div>\n      {/if}\n\n    {:else if activeTab === 'users'}\n      {#if usersLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else}\n        <div class=\"flex items-center gap-2 mb-2\">\n          <h2 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300\">Application Users</h2>\n          <HelpTip text=\"Admin access is explicit in CH-UI (role override). ClickHouse grants alone do not grant Admin UI actions. Safety rule: the last admin override cannot be removed.\" />\n        </div>\n        {#if !usersSyncCheck}\n          <div class=\"mb-2 rounded-md border border-amber-500/30 bg-amber-500/10 px-3 py-2 text-xs text-amber-700 dark:text-amber-300\">\n            User sync check is unavailable right now (connection offline or auth issue). 
Application users list may include stale session users.\n          </div>\n        {/if}\n        {#if users.length === 0}\n          <p class=\"text-sm text-gray-500 mb-4\">No users found</p>\n        {:else}\n\t          <div class=\"ds-table-wrap mb-6\">\n\t            <table class=\"ds-table\">\n              <thead>\n                <tr class=\"ds-table-head-row\">\n                  <th class=\"ds-table-th\">Username</th>\n                  <th class=\"ds-table-th\">Role</th>\n                  <th class=\"ds-table-th\">Last Login</th>\n                  <th class=\"ds-table-th-right\">Actions</th>\n                </tr>\n              </thead>\n              <tbody>\n                {#each users as user}\n                  <tr class=\"ds-table-row\">\n\t                    <td class=\"ds-td-mono\">{user.username}</td>\n\t                    <td class=\"ds-td\">\n                      <div class=\"inline-flex items-center rounded-lg border border-gray-300/80 dark:border-gray-700/80 bg-gray-100/70 dark:bg-gray-900/65 p-1\">\n                        {#each roleOptions as roleOpt}\n                          <button\n                            type=\"button\"\n                            class=\"px-2.5 h-7 rounded-md text-xs transition-colors disabled:opacity-60 disabled:cursor-not-allowed\n                              {(userRoles[user.username] ?? user.user_role ?? 'viewer') === roleOpt.value\n                                ? 'bg-orange-100 dark:bg-orange-500/15 text-ch-orange'\n                                : 'text-gray-500 hover:text-gray-700 dark:hover:text-gray-300'}\"\n                            disabled={(userRoles[user.username] ?? user.user_role ?? 
'viewer') === roleOpt.value || roleSavingUser === user.username}\n                            onclick={() => setRole(user.username, roleOpt.value)}\n                          >\n                            {roleOpt.label}\n                          </button>\n                        {/each}\n                      </div>\n                    </td>\n\t                    <td class=\"ds-td-mono\">{user.last_login ? formatTime(user.last_login) : '—'}</td>\n\t                    <td class=\"ds-td-right\">\n                      {#if userRoles[user.username]}\n                        <button\n                          class=\"text-xs text-red-500 hover:text-red-700\"\n                          onclick={() => removeRole(user.username)}\n                        >Remove Override</button>\n                      {/if}\n                    </td>\n                  </tr>\n                {/each}\n              </tbody>\n            </table>\n          </div>\n        {/if}\n\n        <div class=\"flex flex-col md:flex-row md:items-center md:justify-between gap-2 mb-2\">\n          <div class=\"flex items-center gap-2\">\n            <h2 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300\">ClickHouse Users</h2>\n            <HelpTip text=\"Direct user management in ClickHouse. 
Create users, rotate passwords, and remove users without leaving CH-UI.\" />\n          </div>\n          <div class=\"flex items-center gap-2\">\n            <button class=\"ds-btn-outline\" onclick={() => refreshUsersTab()} title=\"Refresh\">\n              <RefreshCw size={14} />\n            </button>\n            <button class=\"ds-btn-primary\" onclick={() => openCreateCHUserSheet()}>\n              <UserPlus size={14} />\n              New User\n            </button>\n          </div>\n        </div>\n\n        {#if chUsers.length === 0}\n          <div class=\"ds-empty\">\n            <p class=\"text-sm text-gray-500 mb-2\">No ClickHouse users loaded</p>\n            <button class=\"ds-btn-primary\" onclick={() => openCreateCHUserSheet()}>\n              <UserPlus size={14} />\n              Create First User\n            </button>\n          </div>\n        {:else}\n          <div class=\"ds-table-wrap rounded border border-gray-200 dark:border-gray-800 max-h-[42vh] overflow-auto\">\n            <table class=\"ds-table min-w-[980px]\">\n              <thead>\n                <tr class=\"ds-table-head-row sticky top-0 bg-gray-50 dark:bg-gray-900 z-10\">\n                  <th class=\"ds-table-th\">Name</th>\n                  <th class=\"ds-table-th\">Auth Type</th>\n                  <th class=\"ds-table-th\">Storage</th>\n                  <th class=\"ds-table-th\">Default Roles</th>\n                  <th class=\"ds-table-th-right\">Actions</th>\n                </tr>\n              </thead>\n              <tbody>\n                {#each chUsers as row}\n                  <tr class=\"ds-table-row\">\n                    <td class=\"ds-td-mono\">{row.name}</td>\n                    <td class=\"ds-td-mono\">{row.auth_type ?? '—'}</td>\n                    <td class=\"ds-td-mono\">{row.storage ?? 
'—'}</td>\n                    <td class=\"ds-td-mono truncate max-w-xs\">{formatCHDefaultRoles(row)}</td>\n                    <td class=\"ds-td-right\">\n                      <div class=\"flex justify-end gap-2\">\n                        <button\n                          class=\"ds-btn-outline px-2.5 py-1.5\"\n                          onclick={() => openUpdateCHUserPasswordSheet(row.name, row.auth_type)}\n                        >\n                          <KeyRound size={12} />\n                          Change Password\n                        </button>\n                        <button\n                          class=\"ds-btn-outline px-2.5 py-1.5 border-red-300/80 text-red-600 hover:text-red-700 hover:border-red-500\"\n                          onclick={() => openDeleteCHUserConfirm(row.name)}\n                        >\n                          <Trash2 size={12} />\n                          Delete\n                        </button>\n                      </div>\n                    </td>\n                  </tr>\n                {/each}\n              </tbody>\n            </table>\n          </div>\n        {/if}\n      {/if}\n\n    {:else if activeTab === 'brain'}\n      {#if brainLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else}\n        <div class=\"flex flex-col md:flex-row md:items-center md:justify-between gap-3 mb-4\">\n          <div class=\"flex items-center gap-2\">\n            <Brain size={16} class=\"text-ch-blue\" />\n            <h2 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300\">Brain Control Center</h2>\n            <HelpTip text=\"Manage AI providers and model availability for all users. 
Use provider accordions to keep large model lists manageable.\" />\n          </div>\n          <div class=\"flex flex-wrap items-center gap-2\">\n            <button class=\"ds-btn-outline\" onclick={() => providerSheetOpen = true}>Add Provider</button>\n            <button class=\"ds-btn-outline\" onclick={() => openSkillSheet()}>Edit Global Skill</button>\n            <button class=\"ds-btn-outline\" onclick={() => loadBrainAdmin()} title=\"Refresh\">\n              <RefreshCw size={14} />\n            </button>\n          </div>\n        </div>\n\n        <div class=\"grid grid-cols-2 md:grid-cols-4 gap-2 mb-4\">\n          <div class=\"ds-panel p-2.5\">\n            <div class=\"text-[11px] text-gray-500\">Providers</div>\n            <div class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">{brainProviders.length}</div>\n          </div>\n          <div class=\"ds-panel p-2.5\">\n            <div class=\"text-[11px] text-gray-500\">Active Providers</div>\n            <div class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">{brainProviders.filter(p => p.is_active).length}</div>\n          </div>\n          <div class=\"ds-panel p-2.5\">\n            <div class=\"text-[11px] text-gray-500\">Models</div>\n            <div class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">{brainModels.length}</div>\n          </div>\n          <div class=\"ds-panel p-2.5\">\n            <div class=\"text-[11px] text-gray-500\">Active Models</div>\n            <div class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">{brainModels.filter(m => m.is_active).length}</div>\n          </div>\n        </div>\n\n        {#if brainProviders.length === 0}\n          <div class=\"ds-empty\">\n            <p class=\"text-sm text-gray-500 mb-2\">No Brain providers configured yet.</p>\n            <button class=\"ds-btn-primary\" onclick={() => providerSheetOpen = true}>Create First Provider</button>\n          </div>\n        {:else}\n        
  <div class=\"ds-table-wrap mb-5 max-h-[32vh] overflow-auto rounded-lg border border-gray-200 dark:border-gray-800\">\n            <table class=\"ds-table\">\n              <thead>\n                <tr class=\"ds-table-head-row sticky top-0 bg-gray-50 dark:bg-gray-900 z-10\">\n                  <th class=\"ds-table-th\">Provider</th>\n                  <th class=\"ds-table-th\">Kind</th>\n                  <th class=\"ds-table-th\">Base URL</th>\n                  <th class=\"ds-table-th\">Key</th>\n                  <th class=\"ds-table-th\">Active</th>\n                  <th class=\"ds-table-th\">Default</th>\n                  <th class=\"ds-table-th-right\">Actions</th>\n                </tr>\n              </thead>\n              <tbody>\n                {#each brainProviders as provider}\n                  <tr class=\"ds-table-row\">\n                    <td class=\"ds-td-strong\">{provider.name}</td>\n                    <td class=\"ds-td-mono\">{provider.kind}</td>\n                    <td class=\"ds-td-mono max-w-sm truncate\">{provider.base_url || '—'}</td>\n                    <td class=\"ds-td\">{provider.has_api_key ? 
'Configured' : 'Missing'}</td>\n                    <td class=\"ds-td\">\n                      <input\n                        type=\"checkbox\"\n                        class=\"ds-checkbox\"\n                        checked={provider.is_active}\n                        onchange={(e) => toggleProvider(provider, 'is_active', (e.target as HTMLInputElement).checked)}\n                      />\n                    </td>\n                    <td class=\"ds-td\">\n                      <input\n                        type=\"radio\"\n                        class=\"ds-radio\"\n                        name=\"default-brain-provider\"\n                        checked={provider.is_default}\n                        onchange={() => toggleProvider(provider, 'is_default', true)}\n                      />\n                    </td>\n                    <td class=\"ds-td-right\">\n                      <div class=\"flex justify-end gap-2\">\n                        <button class=\"ds-btn-outline\" onclick={() => syncProviderModels(provider)}>Sync Models</button>\n                        <button class=\"text-xs text-red-500 hover:text-red-700\" onclick={() => deleteProvider(provider)}>Delete</button>\n                      </div>\n                    </td>\n                  </tr>\n                {/each}\n              </tbody>\n            </table>\n          </div>\n        {/if}\n\n        <div class=\"ds-panel p-3 mb-3\">\n          <div class=\"flex items-center gap-2 mb-2\">\n            <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Brain Models</h3>\n            <HelpTip text=\"Models are grouped by provider. 
Expand a provider accordion to activate/deactivate models and choose defaults.\" />\n          </div>\n          <div class=\"grid grid-cols-1 md:grid-cols-4 gap-2\">\n            <Combobox\n              options={providerFilterOptions()}\n              value={modelProviderFilter}\n              onChange={(v) => modelProviderFilter = v}\n            />\n            <input\n              class=\"ds-input-sm md:col-span-2\"\n              placeholder=\"Search models...\"\n              bind:value={modelSearch}\n            />\n            <label class=\"ds-checkbox-label text-xs px-2\">\n              <input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={modelShowOnlyActive} />\n              Show only active\n            </label>\n          </div>\n          <div class=\"mt-2 flex flex-wrap items-center gap-2\">\n            <button class=\"ds-btn-outline\" onclick={() => runModelBulkAction('activate_recommended')}>Activate Recommended</button>\n            <button class=\"ds-btn-outline\" onclick={() => runModelBulkAction('activate_all')}>Activate All</button>\n            <button class=\"ds-btn-outline\" onclick={() => runModelBulkAction('deactivate_all')}>Deactivate All</button>\n          </div>\n        </div>\n\n        {#if brainModels.length === 0}\n          <p class=\"text-sm text-gray-500 mb-6\">No models synced yet.</p>\n        {:else}\n          <div class=\"space-y-2 mb-6\">\n            {#each visibleProvidersForModels() as provider}\n              {@const providerModels = modelsForProvider(provider.id)}\n              <details class=\"ds-card overflow-hidden\" open={modelProviderFilter === provider.id || (!modelProviderFilter && provider.is_default)}>\n                <summary class=\"cursor-pointer list-none px-3 py-2.5 flex items-center justify-between bg-gray-50 dark:bg-gray-900\">\n                  <div class=\"flex items-center gap-2\">\n                    <span class=\"font-medium text-gray-900 
dark:text-gray-100\">{provider.name}</span>\n                    <span class=\"text-[11px] text-gray-500\">{provider.kind}</span>\n                  </div>\n                  <div class=\"flex items-center gap-2 text-xs\">\n                    <span class=\"ds-badge ds-badge-neutral\">{providerModels.filter(m => m.is_active).length} active</span>\n                    <span class=\"text-gray-500\">{providerModels.length} total</span>\n                  </div>\n                </summary>\n                <div class=\"max-h-[36vh] overflow-auto border-t border-gray-200 dark:border-gray-800\">\n                  {#if providerModels.length > 0}\n                    <table class=\"ds-table\">\n                      <thead>\n                        <tr class=\"ds-table-head-row sticky top-0 bg-gray-50 dark:bg-gray-900 z-10\">\n                          <th class=\"ds-table-th\">Model</th>\n                          <th class=\"ds-table-th\">Active</th>\n                          <th class=\"ds-table-th\">Default</th>\n                        </tr>\n                      </thead>\n                      <tbody>\n                        {#each providerModels as model}\n                          <tr class=\"ds-table-row\">\n                            <td class=\"ds-td-mono\">{model.display_name || model.name}</td>\n                            <td class=\"ds-td\">\n                              <input\n                                type=\"checkbox\"\n                                class=\"ds-checkbox\"\n                                checked={model.is_active}\n                                onchange={(e) => updateModel(model, 'is_active', (e.target as HTMLInputElement).checked)}\n                              />\n                            </td>\n                            <td class=\"ds-td\">\n                              <input\n                                type=\"radio\"\n                                class=\"ds-radio\"\n                                
name={\"default-model-\" + model.provider_id}\n                                checked={model.is_default}\n                                onchange={() => updateModel(model, 'is_default', true)}\n                              />\n                            </td>\n                          </tr>\n                        {/each}\n                      </tbody>\n                    </table>\n                  {:else}\n                    <p class=\"text-xs text-gray-500 px-3 py-4\">No models match current filters for this provider.</p>\n                  {/if}\n                </div>\n              </details>\n            {/each}\n          </div>\n        {/if}\n\n        <div class=\"ds-card p-3\">\n          <div class=\"flex items-center justify-between mb-2\">\n            <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Global Brain Skill</h3>\n            <button class=\"ds-btn-outline\" onclick={() => openSkillSheet()}>Open Skill Sheet</button>\n          </div>\n          <p class=\"text-xs text-gray-500 mb-2\">Active prompt preview</p>\n          <pre class=\"text-[11px] leading-relaxed whitespace-pre-wrap text-gray-600 dark:text-gray-300 max-h-36 overflow-auto rounded border border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900 p-2\">{truncate(skillForm.content || '', 1200)}</pre>\n        </div>\n      {/if}\n\n    {:else if activeTab === 'langfuse'}\n      {#if langfuseLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else}\n        <div class=\"flex flex-col md:flex-row md:items-center md:justify-between gap-3 mb-4\">\n          <div class=\"flex items-center gap-2\">\n            <Telescope size={16} class=\"text-ch-blue\" />\n            <h2 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300\">Langfuse Observability</h2>\n          </div>\n          <div class=\"flex items-center gap-2\">\n            {#if langfuseConfig.enabled}\n              <span 
class=\"inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-green-100 text-green-800 dark:bg-green-900/30 dark:text-green-400\">Active</span>\n            {:else}\n              <span class=\"inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400\">Inactive</span>\n            {/if}\n          </div>\n        </div>\n\n        <div class=\"ds-card p-4 mb-4\">\n          <p class=\"text-xs text-gray-500 mb-4\">\n            <a href=\"https://langfuse.com\" target=\"_blank\" rel=\"noopener\" class=\"text-ch-blue hover:underline\">Langfuse</a> provides LLM observability for Brain chat — traces, token usage, latency, and auto-scoring for every generation.\n          </p>\n\n          <div class=\"space-y-3\">\n            <label class=\"block space-y-1\">\n              <span class=\"text-xs text-gray-500\">Base URL</span>\n              <input\n                class=\"ds-input-sm\"\n                type=\"url\"\n                placeholder=\"https://cloud.langfuse.com\"\n                bind:value={langfuseConfig.baseUrl}\n              />\n            </label>\n\n            <label class=\"block space-y-1\">\n              <span class=\"text-xs text-gray-500\">Public Key</span>\n              <input\n                class=\"ds-input-sm font-mono\"\n                type=\"text\"\n                placeholder=\"pk-lf-...\"\n                bind:value={langfuseConfig.publicKey}\n              />\n            </label>\n\n            <label class=\"block space-y-1\">\n              <span class=\"text-xs text-gray-500\">Secret Key</span>\n              <input\n                class=\"ds-input-sm font-mono\"\n                type=\"password\"\n                placeholder={langfuseConfig.hasSecretKey ? 
'(unchanged — enter new value to replace)' : 'sk-lf-...'}\n                bind:value={langfuseSecretKey}\n              />\n            </label>\n          </div>\n\n          <div class=\"flex flex-wrap items-center gap-2 mt-4 pt-3 border-t border-gray-200 dark:border-gray-800\">\n            <button\n              class=\"ds-btn-primary\"\n              onclick={() => saveLangfuseConfig()}\n              disabled={langfuseSaving}\n            >\n              {langfuseSaving ? 'Saving...' : 'Save'}\n            </button>\n            <button\n              class=\"ds-btn-outline\"\n              onclick={() => testLangfuseConnection()}\n              disabled={langfuseTesting}\n            >\n              {langfuseTesting ? 'Testing...' : 'Test Connection'}\n            </button>\n            {#if langfuseConfig.hasSecretKey || langfuseConfig.publicKey}\n              <button\n                class=\"ds-btn-outline text-red-600 dark:text-red-400\"\n                onclick={() => deleteLangfuseConfig()}\n              >\n                Remove\n              </button>\n            {/if}\n          </div>\n        </div>\n      {/if}\n    {/if}\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/pages/Brain.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount, tick } from 'svelte'\n  import { openQueryTab } from '../lib/stores/tabs.svelte'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import { getDatabases, loadDatabases, loadTables, loadColumns } from '../lib/stores/schema.svelte'\n  import type { BrainArtifact, BrainChat, BrainMessage as BrainMessageType, BrainModelOption, SchemaContextEntry } from '../lib/types/brain'\n  import type { ComboboxOption } from '../lib/components/common/Combobox.svelte'\n  import {\n    createBrainChat,\n    deleteBrainChat,\n    listBrainArtifacts,\n    listBrainChats,\n    listBrainMessages,\n    listBrainModels,\n    runBrainQueryArtifact,\n    streamBrainMessage,\n    updateBrainChat,\n  } from '../lib/api/brain'\n\n  import BrainSidebar from '../lib/components/brain/BrainSidebar.svelte'\n  import BrainHeader from '../lib/components/brain/BrainHeader.svelte'\n  import BrainMessage from '../lib/components/brain/BrainMessage.svelte'\n  import BrainInput from '../lib/components/brain/BrainInput.svelte'\n  import BrainEmptyState from '../lib/components/brain/BrainEmptyState.svelte'\n  import ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte'\n  import InputDialog from '../lib/components/common/InputDialog.svelte'\n\n  const MAX_CONTEXTS = 10\n\n  // ── State ──────────────────────────────────────────────────\n  let loading = $state(true)\n  let chats = $state<BrainChat[]>([])\n  let models = $state<BrainModelOption[]>([])\n  let selectedChatId = $state<string>('')\n  let messages = $state<BrainMessageType[]>([])\n  let artifacts = $state<BrainArtifact[]>([])\n  let input = $state('')\n  let streaming = $state(false)\n  let selectedModelId = $state('')\n  let messagesEl: HTMLDivElement | undefined = $state()\n  let runningSql = $state<string | null>(null)\n\n  // Multi-context state\n  let contexts = $state<SchemaContextEntry[]>([])\n  let headerDb = $state('')\n  let 
headerTable = $state('')\n\n  // Dialog state\n  let renamingChat = $state<BrainChat | null>(null)\n  let renameValue = $state('')\n  let deletingChat = $state<BrainChat | null>(null)\n\n  // ── Derived ────────────────────────────────────────────────\n  const artifactsByMessageId = $derived.by(() => {\n    const map = new Map<string, BrainArtifact[]>()\n    for (const art of artifacts) {\n      const key = art.message_id ?? '__orphan__'\n      const list = map.get(key) ?? []\n      list.push(art)\n      map.set(key, list)\n    }\n    return map\n  })\n\n  const databaseOptions = $derived.by<ComboboxOption[]>(() =>\n    getDatabases().map(db => ({\n      value: db.name,\n      label: db.name,\n      keywords: db.name,\n    }))\n  )\n\n  const tableOptions = $derived.by<ComboboxOption[]>(() => {\n    const db = getDatabases().find(d => d.name === headerDb)\n    const tables = db?.tables?.map(t => t.name) ?? []\n    return tables.map(t => ({\n      value: t,\n      label: t,\n      hint: headerDb,\n      keywords: `${headerDb}.${t}`,\n    }))\n  })\n\n  // ── Lifecycle ──────────────────────────────────────────────\n  onMount(async () => {\n    const dbs = getDatabases()\n    if (dbs.length === 0) await loadDatabases()\n\n    await Promise.all([loadModels(), loadChats()])\n    if (!selectedChatId) {\n      await createChat('New Chat')\n    }\n    loading = false\n  })\n\n  // ── API functions ──────────────────────────────────────────\n  async function loadModels() {\n    try {\n      models = await listBrainModels()\n      const def = models.find(m => m.is_default) ?? 
models[0]\n      if (def && !selectedModelId) selectedModelId = def.id\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function loadChats() {\n    try {\n      chats = await listBrainChats(false)\n      if (chats.length > 0 && !selectedChatId) {\n        await selectChat(chats[0].id)\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function createChat(title = 'New Chat') {\n    try {\n      const chat = await createBrainChat({ title, modelId: selectedModelId || undefined })\n      chats = [chat, ...chats]\n      await selectChat(chat.id)\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function selectChat(chatId: string) {\n    selectedChatId = chatId\n    const chat = chats.find(c => c.id === chatId)\n    if (chat?.model_id) selectedModelId = chat.model_id\n\n    // Restore contexts\n    await restoreContexts(chat)\n\n    try {\n      const [msgs, arts] = await Promise.all([\n        listBrainMessages(chatId),\n        listBrainArtifacts(chatId),\n      ])\n      messages = msgs\n      artifacts = arts\n      await tick()\n      scrollToBottom()\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function restoreContexts(chat: BrainChat | undefined) {\n    if (!chat) {\n      contexts = []\n      return\n    }\n\n    // Try new multi-context format first\n    if (chat.context_tables) {\n      try {\n        const parsed = JSON.parse(chat.context_tables) as { database: string; table: string }[]\n        const restored: SchemaContextEntry[] = []\n        for (const entry of parsed) {\n          const cols = await resolveColumns(entry.database, entry.table)\n          restored.push({ database: entry.database, table: entry.table, columns: cols })\n        }\n        contexts = restored\n        return\n      } catch {\n        // fall through to legacy\n      }\n    }\n\n    // Legacy single-context fallback\n    if (chat.context_database && 
chat.context_table) {\n      const cols = await resolveColumns(chat.context_database, chat.context_table)\n      contexts = [{ database: chat.context_database, table: chat.context_table, columns: cols }]\n    } else {\n      contexts = []\n    }\n  }\n\n  async function resolveColumns(dbName: string, tableName: string): Promise<{ name: string; type: string }[]> {\n    const db = getDatabases().find(d => d.name === dbName)\n    if (!db?.tables) await loadTables(dbName)\n    await loadColumns(dbName, tableName)\n    const freshDb = getDatabases().find(d => d.name === dbName)\n    const table = freshDb?.tables?.find(t => t.name === tableName)\n    return (table?.columns ?? []).map(c => ({ name: c.name, type: c.type }))\n  }\n\n  function renameChat(chat: BrainChat) {\n    renameValue = chat.title\n    renamingChat = chat\n  }\n\n  async function confirmRename(newTitle: string) {\n    if (!renamingChat) return\n    const chat = renamingChat\n    renamingChat = null\n    try {\n      await updateBrainChat(chat.id, { title: newTitle })\n      chats = chats.map(c => c.id === chat.id ? 
{ ...c, title: newTitle } : c)\n      if (selectedChatId === chat.id) {\n        await selectChat(chat.id)\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function removeChat(chat: BrainChat) {\n    deletingChat = chat\n  }\n\n  async function confirmDelete() {\n    if (!deletingChat) return\n    const chat = deletingChat\n    deletingChat = null\n    try {\n      await deleteBrainChat(chat.id)\n      chats = chats.filter(c => c.id !== chat.id)\n      if (selectedChatId === chat.id) {\n        selectedChatId = ''\n        messages = []\n        artifacts = []\n        if (chats.length > 0) {\n          await selectChat(chats[0].id)\n        } else {\n          await createChat('New Chat')\n        }\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  // ── Multi-context management ──────────────────────────────\n  async function addContext(dbName: string, tableName: string) {\n    // Dedupe check\n    if (contexts.some(c => c.database === dbName && c.table === tableName)) return\n\n    if (contexts.length >= MAX_CONTEXTS) {\n      toastError(`Maximum ${MAX_CONTEXTS} table contexts allowed`)\n      return\n    }\n\n    const cols = await resolveColumns(dbName, tableName)\n    contexts = [...contexts, { database: dbName, table: tableName, columns: cols }]\n    persistContexts()\n  }\n\n  function removeContext(dbName: string, tableName: string) {\n    contexts = contexts.filter(c => !(c.database === dbName && c.table === tableName))\n    persistContexts()\n  }\n\n  function clearAllContexts() {\n    contexts = []\n    persistContexts()\n  }\n\n  function persistContexts() {\n    if (!selectedChatId) return\n    const serialized = contexts.length > 0\n      ? JSON.stringify(contexts.map(c => ({ database: c.database, table: c.table })))\n      : ''\n    updateBrainChat(selectedChatId, { contextTables: serialized }).catch(() => {})\n    chats = chats.map(c => c.id === selectedChatId ? 
{ ...c, context_tables: serialized || null } : c)\n  }\n\n  // Header combobox handlers — additive workflow\n  async function onHeaderDbChange(dbName: string) {\n    headerDb = dbName\n    headerTable = ''\n    if (dbName) {\n      const db = getDatabases().find(d => d.name === dbName)\n      if (!db?.tables) await loadTables(dbName)\n    }\n  }\n\n  async function onHeaderTableChange(tableName: string) {\n    if (headerDb && tableName) {\n      await addContext(headerDb, tableName)\n      // Reset comboboxes after adding\n      headerDb = ''\n      headerTable = ''\n    } else {\n      headerTable = tableName\n    }\n  }\n\n  // ── Chat actions ───────────────────────────────────────────\n  async function sendMessage() {\n    if (!input.trim() || streaming) return\n\n    if (!selectedChatId) {\n      await createChat('New Chat')\n      if (!selectedChatId) return\n    }\n\n    const userPrompt = input.trim()\n    input = ''\n\n    const tempUser: BrainMessageType = {\n      id: `tmp-user-${Date.now()}`,\n      chat_id: selectedChatId,\n      role: 'user',\n      content: userPrompt,\n      status: 'complete',\n      created_at: new Date().toISOString(),\n      updated_at: new Date().toISOString(),\n    }\n    const tempAssistant: BrainMessageType = {\n      id: `tmp-assistant-${Date.now()}`,\n      chat_id: selectedChatId,\n      role: 'assistant',\n      content: '',\n      status: 'streaming',\n      created_at: new Date().toISOString(),\n      updated_at: new Date().toISOString(),\n    }\n\n    messages = [...messages, tempUser, tempAssistant]\n    const assistantIdx = messages.length - 1\n\n    // Build multi-schema contexts for the API\n    const schemaContexts = contexts.length > 0\n      ? 
contexts.map(c => ({\n          database: c.database,\n          table: c.table,\n          columns: c.columns,\n        }))\n      : undefined\n\n    streaming = true\n    await tick()\n    scrollToBottom()\n\n    try {\n      await streamBrainMessage(\n        selectedChatId,\n        {\n          content: userPrompt,\n          modelId: selectedModelId || undefined,\n          schemaContexts,\n        },\n        (event) => {\n          if (event.type === 'delta') {\n            const delta = event.delta ?? ''\n            messages = messages.map((m, i) => i === assistantIdx ? { ...m, content: m.content + delta } : m)\n          } else if (event.type === 'error') {\n            messages = messages.map((m, i) => i === assistantIdx ? { ...m, content: m.content || `Error: ${event.error ?? 'Unknown error'}`, status: 'error' } : m)\n          }\n        },\n      )\n\n      await Promise.all([\n        selectChat(selectedChatId),\n        loadChats(),\n      ])\n    } catch (e: any) {\n      messages = messages.map((m, i) => i === assistantIdx ? 
{ ...m, content: m.content || `Error: ${e.message}`, status: 'error' } : m)\n    } finally {\n      streaming = false\n      await tick()\n      scrollToBottom()\n    }\n  }\n\n  async function runSqlInChat(sql: string, messageId?: string) {\n    if (!selectedChatId) return\n    runningSql = sql\n    try {\n      await runBrainQueryArtifact(selectedChatId, {\n        query: sql,\n        title: 'Query Result',\n        messageId,\n      })\n      toastSuccess('Query executed and saved as artifact')\n      artifacts = await listBrainArtifacts(selectedChatId)\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      runningSql = null\n    }\n  }\n\n  function scrollToBottom() {\n    if (messagesEl) messagesEl.scrollTop = messagesEl.scrollHeight\n  }\n</script>\n\n<div class=\"flex h-full\">\n  <BrainSidebar\n    {chats}\n    {selectedChatId}\n    {loading}\n    onSelectChat={(id) => selectChat(id)}\n    onCreateChat={() => createChat('New Chat')}\n    onRenameChat={renameChat}\n    onDeleteChat={removeChat}\n  />\n\n  <main class=\"flex-1 flex flex-col min-w-0\">\n    <BrainHeader\n      {models}\n      {selectedModelId}\n      selectedDb={headerDb}\n      selectedTable={headerTable}\n      {databaseOptions}\n      {tableOptions}\n      onModelChange={(v) => selectedModelId = v}\n      onDbChange={(v) => { void onHeaderDbChange(v) }}\n      onTableChange={(v) => { void onHeaderTableChange(v) }}\n    />\n\n    <div class=\"flex-1 overflow-auto p-6 space-y-5\" bind:this={messagesEl}>\n      {#if messages.length === 0}\n        <BrainEmptyState />\n      {:else}\n        {#each messages as msg, i (msg.id)}\n          <BrainMessage\n            message={msg}\n            artifacts={artifactsByMessageId.get(msg.id) ?? 
[]}\n            {streaming}\n            isLastMessage={i === messages.length - 1}\n            {runningSql}\n            onRunSql={runSqlInChat}\n            onOpenInEditor={(sql) => openQueryTab(sql)}\n          />\n        {/each}\n      {/if}\n    </div>\n\n    <BrainInput\n      value={input}\n      {streaming}\n      {contexts}\n      onSend={sendMessage}\n      onInput={(v) => input = v}\n      onAddContext={addContext}\n      onRemoveContext={removeContext}\n      onClearAllContexts={clearAllContexts}\n    />\n  </main>\n</div>\n\n<InputDialog\n  open={renamingChat !== null}\n  title=\"Rename chat\"\n  placeholder=\"Chat name\"\n  bind:value={renameValue}\n  onconfirm={confirmRename}\n  oncancel={() => renamingChat = null}\n/>\n\n<ConfirmDialog\n  open={deletingChat !== null}\n  title=\"Delete chat\"\n  description={`Are you sure you want to delete \"${deletingChat?.title}\"? This cannot be undone.`}\n  confirmLabel=\"Delete\"\n  destructive\n  onconfirm={confirmDelete}\n  oncancel={() => deletingChat = null}\n/>\n"
  },
  {
    "path": "ui/src/pages/Dashboards.svelte",
    "content": "<script lang=\"ts\">\n  import type { Dashboard, Panel } from '../lib/types/api'\n  import { apiGet, apiPost, apiPut, apiDel } from '../lib/api/client'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import { openDashboardTab, openSingletonTab } from '../lib/stores/tabs.svelte'\n  import { toDashboardTimeRangePayload } from '../lib/utils/dashboard-time'\n  import Button from '../lib/components/common/Button.svelte'\n  import Spinner from '../lib/components/common/Spinner.svelte'\n  import Sheet from '../lib/components/common/Sheet.svelte'\n  import ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte'\n  import PanelEditor from '../lib/components/dashboard/PanelEditor.svelte'\n  import DashboardGrid from '../lib/components/dashboard/DashboardGrid.svelte'\n  import TimeRangeSelector from '../lib/components/dashboard/TimeRangeSelector.svelte'\n  import { LayoutDashboard, Plus, Trash2, ArrowLeft, RefreshCw } from 'lucide-svelte'\n\n  interface Props {\n    dashboardId?: string\n  }\n\n  let { dashboardId }: Props = $props()\n\n  // List view\n  let dashboards = $state<Dashboard[]>([])\n  let listLoading = $state(true)\n\n  // Detail view\n  let currentDashboard = $state<Dashboard | null>(null)\n  let panels = $state<Panel[]>([])\n  let panelResults = $state<Map<string, { data: any[]; meta: any[]; error?: string; loading: boolean }>>(new Map())\n  let detailLoading = $state(false)\n  let detailError = $state<string | null>(null)\n  let loadedDashboardId = $state<string | null>(null)\n  let dashboardTimeRange = $state(localStorage.getItem('ch-ui-dashboard-time-range') ?? 
'1h')\n\n  // Create dashboard sheet\n  let showCreateModal = $state(false)\n  let createName = $state('')\n  let createDesc = $state('')\n  let creating = $state(false)\n\n  // Panel editor page\n  let panelEditorOpen = $state(false)\n  let editingPanel = $state<Panel | null>(null)\n  let confirmOpen = $state(false)\n  let confirmLoading = $state(false)\n  let confirmTitle = $state('')\n  let confirmDescription = $state('')\n  let confirmTargetDashboardId = $state<string | null>(null)\n  let confirmTargetPanelId = $state<string | null>(null)\n\n  // Inline edit\n  let editingTitle = $state(false)\n  let titleInput = $state('')\n\n  $effect(() => {\n    const id = dashboardId\n    if (!id) {\n      if (loadedDashboardId !== null) {\n        currentDashboard = null\n        panels = []\n        panelResults = new Map()\n        loadedDashboardId = null\n      }\n      void loadDashboards()\n      return\n    }\n    if (loadedDashboardId === id) return\n    loadedDashboardId = id\n    void loadDashboardDetail(id)\n  })\n\n  async function loadDashboards() {\n    listLoading = true\n    try {\n      const res = await apiGet<{ dashboards: Dashboard[] }>('/api/dashboards')\n      dashboards = res.dashboards ?? 
[]\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      listLoading = false\n    }\n  }\n\n  async function createDashboard() {\n    if (!createName.trim()) {\n      toastError('Name is required')\n      return\n    }\n    creating = true\n    try {\n      const res = await apiPost<{ dashboard: Dashboard }>('/api/dashboards', {\n        name: createName.trim(),\n        description: createDesc.trim(),\n      })\n      showCreateModal = false\n      createName = ''\n      createDesc = ''\n      await loadDashboards()\n      if (res.dashboard) {\n        openDashboardTab(res.dashboard.id, res.dashboard.name)\n      }\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      creating = false\n    }\n  }\n\n  async function deleteDashboard(id: string) {\n    try {\n      await apiDel(`/api/dashboards/${id}`)\n      dashboards = dashboards.filter(d => d.id !== id)\n      toastSuccess('Dashboard deleted')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function loadDashboardDetail(id: string) {\n    detailLoading = true\n    detailError = null\n    panelResults = new Map()\n    try {\n      const res = await apiGet<{ dashboard: Dashboard; panels: Panel[] }>(`/api/dashboards/${id}`)\n      currentDashboard = res.dashboard\n      panels = res.panels ?? []\n      runAllPanelQueries(res.panels ?? 
[])\n    } catch (e: any) {\n      detailError = e.message\n      toastError(e.message)\n    } finally {\n      detailLoading = false\n    }\n  }\n\n  function openDashboardFromList(d: Dashboard) {\n    openDashboardTab(d.id, d.name)\n  }\n\n  function openDashboardListTab() {\n    openSingletonTab('dashboards', 'Dashboards')\n  }\n\n  function runAllPanelQueries(panelsToRun = panels) {\n    for (const p of panelsToRun) runPanelQuery(p)\n  }\n\n  async function runPanelQuery(p: Panel) {\n    const updated = new Map(panelResults)\n    updated.set(p.id, { data: [], meta: [], loading: true })\n    panelResults = updated\n\n    const rangeValue = dashboardTimeRange || '1h'\n\n    try {\n      const res = await apiPost<{ data: any[]; meta: any[]; error?: string }>('/api/dashboards/query', {\n        query: p.query,\n        time_range: toDashboardTimeRangePayload(rangeValue),\n      })\n      const next = new Map(panelResults)\n      next.set(p.id, { data: res.data ?? [], meta: res.meta ?? [], loading: false })\n      panelResults = next\n    } catch (e: any) {\n      const next = new Map(panelResults)\n      next.set(p.id, { data: [], meta: [], error: e.message, loading: false })\n      panelResults = next\n    }\n  }\n\n  function handleTimeRangeChange(nextRange: string) {\n    dashboardTimeRange = nextRange\n    localStorage.setItem('ch-ui-dashboard-time-range', nextRange)\n    runAllPanelQueries()\n  }\n\n  async function saveDashboardTitle() {\n    if (!currentDashboard || !titleInput.trim()) return\n    try {\n      await apiPut(`/api/dashboards/${currentDashboard.id}`, { name: titleInput.trim() })\n      currentDashboard = { ...currentDashboard, name: titleInput.trim() }\n      editingTitle = false\n      toastSuccess('Dashboard renamed')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function openAddPanel() {\n    editingPanel = null\n    panelEditorOpen = true\n  }\n\n  function openEditPanel(p: Panel) {\n    editingPanel = p\n    
panelEditorOpen = true\n  }\n\n  function handlePanelSaved(savedPanel: Panel) {\n    if (editingPanel?.id) {\n      panels = panels.map(p => p.id === editingPanel!.id ? savedPanel : p)\n    } else {\n      panels = [...panels, savedPanel]\n    }\n    runPanelQuery(savedPanel)\n    panelEditorOpen = false\n    toastSuccess(editingPanel?.id ? 'Panel updated' : 'Panel created')\n  }\n\n  async function deletePanel(panelId: string) {\n    if (!currentDashboard) return\n    try {\n      await apiDel(`/api/dashboards/${currentDashboard.id}/panels/${panelId}`)\n      panels = panels.filter(p => p.id !== panelId)\n      toastSuccess('Panel deleted')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function requestDeleteDashboard(id: string) {\n    confirmTargetDashboardId = id\n    confirmTargetPanelId = null\n    confirmTitle = 'Delete dashboard?'\n    confirmDescription = 'This will permanently remove the dashboard and all its panels.'\n    confirmOpen = true\n  }\n\n  function requestDeletePanel(id: string) {\n    confirmTargetDashboardId = null\n    confirmTargetPanelId = id\n    confirmTitle = 'Delete panel?'\n    confirmDescription = 'This panel will be permanently removed from the dashboard.'\n    confirmOpen = true\n  }\n\n  async function confirmDelete() {\n    if (confirmLoading) return\n    confirmLoading = true\n    try {\n      if (confirmTargetDashboardId) {\n        await deleteDashboard(confirmTargetDashboardId)\n      } else if (confirmTargetPanelId) {\n        await deletePanel(confirmTargetPanelId)\n      }\n      confirmOpen = false\n      confirmTargetDashboardId = null\n      confirmTargetPanelId = null\n    } finally {\n      confirmLoading = false\n    }\n  }\n\n  function formatTime(ts: string): string {\n    try {\n      return new Date(ts).toLocaleString()\n    } catch {\n      return ts\n    }\n  }\n</script>\n\n<div class=\"flex flex-col h-full\">\n  {#if !dashboardId}\n    <div class=\"flex items-center justify-between 
px-4 py-3 border-b border-gray-200 dark:border-gray-800\">\n      <div class=\"flex items-center gap-3\">\n        <LayoutDashboard size={18} class=\"text-ch-blue\" />\n        <h1 class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">Dashboards</h1>\n      </div>\n      <Button size=\"sm\" onclick={() => { showCreateModal = true }}>\n        <Plus size={14} /> Create Dashboard\n      </Button>\n    </div>\n\n    <div class=\"flex-1 overflow-auto p-4\">\n      {#if listLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else if dashboards.length === 0}\n        <div class=\"text-center py-12 text-gray-500\">\n          <LayoutDashboard size={36} class=\"mx-auto mb-2 text-gray-300 dark:text-gray-700\" />\n          <p class=\"mb-1\">No dashboards yet</p>\n          <p class=\"text-xs text-gray-400 dark:text-gray-600\">Create a dashboard to visualize your ClickHouse data</p>\n        </div>\n      {:else}\n        <div class=\"grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4\">\n          {#each dashboards as dashboard (dashboard.id)}\n            <div\n              class=\"bg-gray-50 dark:bg-gray-900 border border-gray-200 dark:border-gray-800 rounded-lg p-4 hover:border-gray-300 dark:hover:border-gray-700 transition-colors cursor-pointer group\"\n              onclick={() => openDashboardFromList(dashboard)}\n              role=\"button\"\n              tabindex=\"0\"\n              onkeydown={(e) => { if (e.key === 'Enter') openDashboardFromList(dashboard) }}\n            >\n              <div class=\"flex items-start justify-between\">\n                <div class=\"flex-1 min-w-0\">\n                  <h3 class=\"text-sm font-medium text-gray-800 dark:text-gray-200 truncate\">{dashboard.name}</h3>\n                  {#if dashboard.description}\n                    <p class=\"text-xs text-gray-500 mt-1 truncate\">{dashboard.description}</p>\n                  {/if}\n                </div>\n             
   <button\n                  class=\"p-1.5 rounded text-gray-400 hover:text-red-400 hover:bg-gray-200 dark:hover:bg-gray-800 opacity-0 group-hover:opacity-100 transition-opacity\"\n                  onclick={(e) => { e.stopPropagation(); requestDeleteDashboard(dashboard.id) }}\n                  title=\"Delete\"\n                >\n                  <Trash2 size={14} />\n                </button>\n              </div>\n              <div class=\"flex items-center gap-3 mt-3 text-xs text-gray-400\">\n                <span>by {dashboard.created_by}</span>\n                <span>{formatTime(dashboard.updated_at)}</span>\n              </div>\n            </div>\n          {/each}\n        </div>\n      {/if}\n    </div>\n  {:else}\n    <div class=\"flex items-center gap-3 px-4 py-3 border-b border-gray-200 dark:border-gray-800\">\n      <button\n        class=\"p-1.5 rounded text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-200 dark:hover:bg-gray-800\"\n        onclick={openDashboardListTab}\n        title=\"Back to dashboard list\"\n      >\n        <ArrowLeft size={16} />\n      </button>\n\n      {#if editingTitle}\n        <input\n          type=\"text\"\n          class=\"text-lg font-semibold bg-transparent border-b border-ch-blue text-gray-900 dark:text-gray-100 outline-none\"\n          bind:value={titleInput}\n          onkeydown={(e) => { if (e.key === 'Enter') saveDashboardTitle(); if (e.key === 'Escape') editingTitle = false }}\n          onblur={saveDashboardTitle}\n        />\n      {:else}\n        <h1\n          class=\"text-lg font-semibold text-gray-900 dark:text-gray-100 cursor-pointer hover:text-ch-blue\"\n          ondblclick={() => { editingTitle = true; titleInput = currentDashboard?.name ?? '' }}\n          title=\"Double-click to rename\"\n        >\n          {currentDashboard?.name ?? 
'Dashboard'}\n        </h1>\n      {/if}\n\n      {#if currentDashboard?.description}\n        <span class=\"text-xs text-gray-500 truncate max-w-[32ch]\">{currentDashboard.description}</span>\n      {/if}\n\n      <div class=\"ml-auto flex items-center gap-2\">\n        <TimeRangeSelector value={dashboardTimeRange} onchange={handleTimeRangeChange} />\n        {#if panelEditorOpen}\n          <span class=\"text-xs text-gray-500 dark:text-gray-400\">\n            Panel builder mode\n          </span>\n        {:else}\n          <Button size=\"sm\" variant=\"secondary\" onclick={() => runAllPanelQueries()}>\n            <RefreshCw size={14} /> Refresh\n          </Button>\n          <Button size=\"sm\" onclick={openAddPanel}>\n            <Plus size={14} /> Add Panel\n          </Button>\n        {/if}\n      </div>\n    </div>\n\n    <div class=\"flex-1 min-h-0 {panelEditorOpen ? 'overflow-hidden' : 'overflow-auto p-4'}\">\n      {#if panelEditorOpen}\n        <PanelEditor\n          dashboardId={currentDashboard?.id ?? 
''}\n          dashboardTimeRange={dashboardTimeRange}\n          panel={editingPanel}\n          onclose={() => panelEditorOpen = false}\n          onsave={handlePanelSaved}\n        />\n      {:else if detailLoading}\n        <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n      {:else if detailError}\n        <div class=\"text-sm text-red-500 bg-red-100/20 dark:bg-red-900/20 border border-red-300/50 dark:border-red-800/50 rounded-lg p-3\">{detailError}</div>\n      {:else if currentDashboard}\n        <DashboardGrid\n          dashboardId={currentDashboard.id}\n          {panels}\n          {panelResults}\n          onpanelschange={(updated) => { panels = updated }}\n          oneditpanel={openEditPanel}\n          ondeletepanel={requestDeletePanel}\n        />\n      {/if}\n    </div>\n  {/if}\n</div>\n\n<Sheet open={showCreateModal} title=\"Create Dashboard\" size=\"sm\" onclose={() => showCreateModal = false}>\n  <div class=\"flex flex-col gap-3\">\n    <div>\n      <label for=\"dashboard-create-name\" class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">Name</label>\n      <input\n        id=\"dashboard-create-name\"\n        type=\"text\"\n        class=\"w-full text-sm bg-transparent border border-gray-300 dark:border-gray-700 rounded px-3 py-2 text-gray-800 dark:text-gray-200\"\n        placeholder=\"My Dashboard\"\n        bind:value={createName}\n      />\n    </div>\n    <div>\n      <label for=\"dashboard-create-description\" class=\"block text-xs font-medium text-gray-700 dark:text-gray-300 mb-1\">Description</label>\n      <input\n        id=\"dashboard-create-description\"\n        type=\"text\"\n        class=\"w-full text-sm bg-transparent border border-gray-300 dark:border-gray-700 rounded px-3 py-2 text-gray-800 dark:text-gray-200\"\n        placeholder=\"Optional description\"\n        bind:value={createDesc}\n      />\n    </div>\n    <div class=\"flex justify-end gap-2 pt-2\">\n      <Button 
variant=\"secondary\" size=\"sm\" onclick={() => showCreateModal = false}>Cancel</Button>\n      <Button size=\"sm\" loading={creating} onclick={createDashboard}>Create</Button>\n    </div>\n  </div>\n</Sheet>\n\n<ConfirmDialog\n  open={confirmOpen}\n  title={confirmTitle}\n  description={confirmDescription}\n  confirmLabel=\"Delete\"\n  destructive={true}\n  loading={confirmLoading}\n  onconfirm={confirmDelete}\n  oncancel={() => confirmOpen = false}\n/>\n"
  },
  {
    "path": "ui/src/pages/Governance.svelte",
    "content": "<script lang=\"ts\">\n\timport { onMount } from 'svelte';\n\timport {\n\t\tRefreshCw,\n\t\tDatabase,\n\t\tTable2,\n\t\tColumns3,\n\t\tUsers,\n\t\tSearch,\n\t\tGitBranch,\n\t\tShield,\n\t\tAlertTriangle,\n\t\tPlus,\n\t\tEdit,\n\t\tTrash2,\n\t\tPanelRightOpen,\n\t\tChevronRight,\n\t\tChevronDown,\n\t\tMessageSquare,\n\t\tSiren,\n\t\tBell,\n\t\tSettings as SettingsIcon,\n\t\tInfo,\n\t\tX,\n\t\tLock\n\t} from 'lucide-svelte';\n\timport Spinner from '../lib/components/common/Spinner.svelte';\n\timport Combobox from '../lib/components/common/Combobox.svelte';\n\timport type { ComboboxOption } from '../lib/components/common/Combobox.svelte';\n\timport ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte';\n\timport Sheet from '../lib/components/common/Sheet.svelte';\n\timport HelpTip from '../lib/components/common/HelpTip.svelte';\n\timport LineageGraphView from '../lib/components/governance/LineageGraph.svelte';\n\timport MiniTrendChart from '../lib/components/common/MiniTrendChart.svelte';\n\timport { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte';\n\timport {\n\t\tfetchOverview,\n\t\tfetchDatabases,\n\t\tfetchTables,\n\t\tfetchTableDetail,\n\t\tfetchQueryLog,\n\t\tfetchTopQueries,\n\t\tfetchLineageGraph,\n\t\tfetchViewGraph,\n\t\tfetchQueryByQueryID,\n\t\tfetchAccessUsers,\n\t\tfetchAccessRoles,\n\t\tfetchAccessMatrix,\n\t\tfetchOverPermissions,\n\t\tfetchPolicies,\n\t\tcreatePolicy as apiCreatePolicy,\n\t\tupdatePolicy as apiUpdatePolicy,\n\t\tdeletePolicy as apiDeletePolicy,\n\t\tfetchViolations,\n\t\tpromoteViolationToIncident,\n\t\tfetchIncidents,\n\t\tcreateIncident as apiCreateIncident,\n\t\tupdateIncident as apiUpdateIncident,\n\t\tgetIncident as apiGetIncident,\n\t\tfetchIncidentComments,\n\t\tcreateIncidentComment as apiCreateIncidentComment,\n\t\tfetchTableNotes,\n\t\tcreateTableNote as apiCreateTableNote,\n\t\tdeleteObjectNote as 
apiDeleteObjectNote,\n\t\ttriggerSync,\n\t\tfetchGovernanceSettings,\n\t\tupdateGovernanceSettings\n\t} from '../lib/api/governance';\n\timport { apiGet } from '../lib/api/client';\n\timport type { AlertChannel, AlertChannelType, AlertEvent, AlertRule } from '../lib/types/alerts';\n\timport {\n\t\tadminListAlertChannels,\n\t\tadminCreateAlertChannel,\n\t\tadminUpdateAlertChannel,\n\t\tadminDeleteAlertChannel,\n\t\tadminTestAlertChannel,\n\t\tadminListAlertRules,\n\t\tadminCreateAlertRule,\n\t\tadminUpdateAlertRule,\n\t\tadminDeleteAlertRule,\n\t\tadminListAlertEvents,\n\t} from '../lib/api/alerts';\n\timport type { AlertRuleRoutePayload } from '../lib/api/alerts';\n\timport type { AuditLog } from '../lib/types/api';\n\timport type {\n\t\tGovernanceOverview,\n\t\tGovDatabase,\n\t\tGovTable,\n\t\tGovColumn,\n\t\tQueryLogEntry,\n\t\tTopQuery,\n\t\tLineageEdge,\n\t\tLineageGraph,\n\t\tChUser,\n\t\tChRole,\n\t\tAccessMatrixEntry,\n\t\tOverPermission,\n\t\tPolicy,\n\t\tPolicyViolation,\n\t\tGovernanceIncident,\n\t\tGovernanceIncidentComment,\n\t\tGovernanceObjectComment,\n\t\tSyncState,\n\t\tGovernanceSettings\n\t} from '../lib/types/governance';\n\n\t// State\n\ttype GovernanceTab = 'dashboard' | 'tables' | 'queries' | 'lineage' | 'viewgraph' | 'access' | 'incidents' | 'policies' | 'querylog' | 'alerts' | 'auditlog' | 'settings';\n\ttype OverPermissionGroup = {\n\t\tuserName: string;\n\t\talerts: OverPermission[];\n\t\ttotal: number;\n\t\tdatabases: number;\n\t\tcritical: number;\n\t\twarn: number;\n\t\tinfo: number;\n\t\ttopSeverity: 'critical' | 'warn' | 'info';\n\t};\n\tconst governanceTabs: Array<{ id: GovernanceTab; label: string }> = [\n\t\t{ id: 'dashboard', label: 'Dashboard' },\n\t\t{ id: 'tables', label: 'Tables' },\n\t\t{ id: 'queries', label: 'Query Audit' },\n\t\t{ id: 'lineage', label: 'Lineage' },\n\t\t{ id: 'viewgraph', label: 'View Graph' },\n\t\t{ id: 'access', label: 'Access' },\n\t\t{ id: 'incidents', label: 'Incidents' },\n\t\t{ id: 'policies', 
label: 'Policies' },\n\t\t{ id: 'querylog', label: 'Query Log' },\n\t\t{ id: 'alerts', label: 'Alerts' },\n\t\t{ id: 'auditlog', label: 'Audit Log' },\n\t\t{ id: 'settings', label: 'Settings' },\n\t];\n\n\tlet activeTab = $state<GovernanceTab>('dashboard');\n\tlet loading = $state<boolean>(false);\n\tlet syncing = $state<boolean>(false);\n\n\t// Sync settings (opt-in toggle + upgrade banner)\n\tlet govSettings = $state<GovernanceSettings | null>(null);\n\tlet govSettingsLoading = $state<boolean>(false);\n\tlet govToggleSaving = $state<boolean>(false);\n\tlet showEnableConfirm = $state<boolean>(false);\n\tlet showDisableConfirm = $state<boolean>(false);\n\n\t// Dashboard data\n\tlet overview = $state<GovernanceOverview | null>(null);\n\n\t// Tables data\n\tlet databases = $state<GovDatabase[]>([]);\n\tlet tables = $state<GovTable[]>([]);\n\tlet selectedDatabase = $state<string>('');\n\tlet tableSearchQuery = $state<string>('');\n\tlet tableDetailSheetOpen = $state<boolean>(false);\n\tlet selectedTable = $state<GovTable | null>(null);\n\tlet selectedTableColumns = $state<GovColumn[]>([]);\n\tlet tableDetailLoading = $state<boolean>(false);\n\tlet tableNotes = $state<GovernanceObjectComment[]>([]);\n\tlet tableNoteDraft = $state<string>('');\n\n\t// Query Audit data\n\tlet queryLog = $state<QueryLogEntry[]>([]);\n\tlet topQueries = $state<TopQuery[]>([]);\n\tlet queryUserFilter = $state<string>('');\n\tlet queryLimit = $state<number>(100);\n\tlet queryDetailSheetOpen = $state<boolean>(false);\n\tlet selectedQuery = $state<QueryLogEntry | null>(null);\n\n\t// Lineage data\n\tlet lineageEdges = $state<LineageEdge[]>([]);\n\tlet lineageGraph = $state<LineageGraph | null>(null);\n\tlet lineageSearch = $state('');\n\tlet lineageSelectedEdge = $state<LineageEdge | null>(null);\n\tlet lineageQueryText = $state('');\n\tlet lineageSheetOpen = $state(false);\n\n\t// View Graph data\n\tlet viewGraphData = $state<LineageGraph | null>(null);\n\tlet viewGraphSearch = 
$state('');\n\n\t// Access data\n\tlet users = $state<ChUser[]>([]);\n\tlet roles = $state<ChRole[]>([]);\n\tlet accessMatrix = $state<AccessMatrixEntry[]>([]);\n\tlet overPermissions = $state<OverPermission[]>([]);\n\tlet accessUserFilter = $state<string>('');\n\tlet accessDatabaseFilter = $state<string>('');\n\tlet accessDetailSheetOpen = $state<boolean>(false);\n\tlet selectedOverPermission = $state<OverPermission | null>(null);\n\tlet selectedOverPermissionGroup = $state<OverPermissionGroup | null>(null);\n\tlet expandedOverPermissionUsers = $state<Record<string, boolean>>({});\n\n\t// Policies data\n\tlet policies = $state<Policy[]>([]);\n\tlet violations = $state<PolicyViolation[]>([]);\n\tlet showPolicyForm = $state<boolean>(false);\n\tlet editingPolicy = $state<Policy | null>(null);\n\tlet confirmPolicyDeleteOpen = $state<boolean>(false);\n\tlet confirmPolicyDeleteLoading = $state<boolean>(false);\n\tlet pendingPolicyDelete = $state<Policy | null>(null);\n\tlet incidents = $state<GovernanceIncident[]>([]);\n\tlet incidentStatusFilter = $state<string>('');\n\tlet incidentSeverityFilter = $state<string>('');\n\tlet incidentDetailSheetOpen = $state<boolean>(false);\n\tlet selectedIncident = $state<GovernanceIncident | null>(null);\n\tlet incidentComments = $state<GovernanceIncidentComment[]>([]);\n\tlet incidentCommentDraft = $state<string>('');\n\tlet incidentCreateSheetOpen = $state<boolean>(false);\n\tlet incidentForm = $state({\n\t\ttitle: '',\n\t\tseverity: 'warn',\n\t\tstatus: 'open',\n\t\tassignee: '',\n\t\tdetails: ''\n\t});\n\tlet policyForm = $state<{\n\t\tname: string;\n\t\tdescription: string;\n\t\tobject_type: Policy['object_type'];\n\t\tobject_database: string;\n\t\tobject_table: string;\n\t\tobject_column: string;\n\t\trequired_role: string;\n\t\tseverity: string;\n\t\tenforcement_mode: Policy['enforcement_mode'];\n\t\tenabled: boolean;\n\t}>({\n\t\tname: '',\n\t\tdescription: '',\n\t\tobject_type: 'table',\n\t\tobject_database: 
'',\n\t\tobject_table: '',\n\t\tobject_column: '',\n\t\trequired_role: '',\n\t\tseverity: 'warn',\n\t\tenforcement_mode: 'warn',\n\t\tenabled: true\n\t});\n\n\t// ── ClickHouse Query Log state ───────────────────────────\n\tlet queryLogLoading = $state(false);\n\tlet queryLogData = $state<any[]>([]);\n\tlet queryLogMeta = $state<any[]>([]);\n\tlet qlTimeRange = $state('1h');\n\tlet qlSearch = $state('');\n\tlet qlQueryKind = $state('');\n\tlet qlStatus = $state('');\n\tlet qlLimit = $state(100);\n\tlet qlOffset = $state(0);\n\tlet expandedRow = $state<number | null>(null);\n\n\t// ── Alerts state ─────────────────────────────────────────\n\tlet alertsLoading = $state(false);\n\tlet alertChannels = $state<AlertChannel[]>([]);\n\tlet alertRules = $state<AlertRule[]>([]);\n\tlet alertEvents = $state<AlertEvent[]>([]);\n\tlet alertEventLimit = $state(50);\n\tlet alertTestRecipients = $state('');\n\tlet deletingNoteId = $state<string | null>(null);\n\tlet deletingChannel = $state<AlertChannel | null>(null);\n\tlet deletingRule = $state<AlertRule | null>(null);\n\tlet channelSheetOpen = $state(false);\n\tlet ruleSheetOpen = $state(false);\n\ttype RuleRouteDraft = {\n\t\tchannel_id: string;\n\t\trecipients: string;\n\t\tis_active: boolean;\n\t\tdelivery_mode: 'immediate' | 'digest';\n\t\tdigest_window_minutes: number;\n\t\tescalation_channel_id: string;\n\t\tescalation_recipients: string;\n\t\tescalation_after_failures: number;\n\t};\n\tlet ruleRoutesDraft = $state<RuleRouteDraft[]>([{\n\t\tchannel_id: '',\n\t\trecipients: '',\n\t\tis_active: true,\n\t\tdelivery_mode: 'immediate',\n\t\tdigest_window_minutes: 15,\n\t\tescalation_channel_id: '',\n\t\tescalation_recipients: '',\n\t\tescalation_after_failures: 0,\n\t}]);\n\tlet channelForm = $state({\n\t\tname: '',\n\t\tchannel_type: 'smtp' as AlertChannelType,\n\t\tis_active: true,\n\t\tsmtp_host: '',\n\t\tsmtp_port: 587,\n\t\tsmtp_username: '',\n\t\tsmtp_password: '',\n\t\tsmtp_from_email: '',\n\t\tsmtp_from_name: 
'',\n\t\tsmtp_use_tls: true,\n\t\tsmtp_starttls: false,\n\t\tapi_key: '',\n\t\tapi_from_email: '',\n\t\tapi_from_name: '',\n\t\tapi_base_url: '',\n\t});\n\tlet ruleForm = $state({\n\t\tname: '',\n\t\tevent_type: 'policy.violation',\n\t\tseverity_min: 'warn',\n\t\tenabled: true,\n\t\tcooldown_seconds: 300,\n\t\tmax_attempts: 3,\n\t\tsubject_template: '',\n\t\tbody_template: '',\n\t});\n\n\t// ── Audit Log state ──────────────────────────────────────\n\tlet auditLogs = $state<AuditLog[]>([]);\n\tlet auditLoading = $state(false);\n\tlet auditLimit = $state(100);\n\tlet auditTimeRange = $state('');\n\tlet auditAction = $state('');\n\tlet auditUsername = $state('');\n\tlet auditSearch = $state('');\n\n\tconst queryLimitOptions: ComboboxOption[] = [\n\t\t{ value: '50', label: '50 queries' },\n\t\t{ value: '100', label: '100 queries' },\n\t\t{ value: '500', label: '500 queries' },\n\t\t{ value: '1000', label: '1000 queries' }\n\t];\n\n\tconst policyObjectTypeOptions: ComboboxOption[] = [\n\t\t{ value: 'database', label: 'Database' },\n\t\t{ value: 'table', label: 'Table' },\n\t\t{ value: 'column', label: 'Column' }\n\t];\n\n\tconst policySeverityOptions: ComboboxOption[] = [\n\t\t{ value: 'info', label: 'Info' },\n\t\t{ value: 'warn', label: 'Warning' },\n\t\t{ value: 'critical', label: 'Critical' }\n\t];\n\n\tconst policyEnforcementModeOptions: ComboboxOption[] = [\n\t\t{ value: 'warn', label: 'Warn (allow)' },\n\t\t{ value: 'block', label: 'Block (deny)' }\n\t];\n\n\tconst incidentSeverityOptions: ComboboxOption[] = [\n\t\t{ value: '', label: 'All Severities' },\n\t\t{ value: 'info', label: 'info' },\n\t\t{ value: 'warn', label: 'warn' },\n\t\t{ value: 'error', label: 'error' },\n\t\t{ value: 'critical', label: 'critical' }\n\t];\n\n\tconst incidentStatusOptions: ComboboxOption[] = [\n\t\t{ value: '', label: 'All Statuses' },\n\t\t{ value: 'open', label: 'open' },\n\t\t{ value: 'triaged', label: 'triaged' },\n\t\t{ value: 'in_progress', label: 'in_progress' },\n\t\t{ 
value: 'resolved', label: 'resolved' },\n\t\t{ value: 'dismissed', label: 'dismissed' }\n\t];\n\n\tconst databaseFilterOptions = $derived.by<ComboboxOption[]>(() => [\n\t\t{ value: '', label: 'All Databases' },\n\t\t...databases.map((db) => ({\n\t\t\tvalue: db.name,\n\t\t\tlabel: db.name\n\t\t}))\n\t]);\n\n\tconst qlTimeRangeOptions: ComboboxOption[] = [\n\t\t{ value: '5m', label: '5 min' },\n\t\t{ value: '15m', label: '15 min' },\n\t\t{ value: '30m', label: '30 min' },\n\t\t{ value: '1h', label: '1 hour' },\n\t\t{ value: '6h', label: '6 hours' },\n\t\t{ value: '12h', label: '12 hours' },\n\t\t{ value: '24h', label: '24 hours' },\n\t\t{ value: '3d', label: '3 days' },\n\t\t{ value: '7d', label: '7 days' },\n\t];\n\n\tconst alertChannelTypeOptions: ComboboxOption[] = [\n\t\t{ value: 'smtp', label: 'SMTP' },\n\t\t{ value: 'resend', label: 'Resend' },\n\t\t{ value: 'brevo', label: 'Brevo' },\n\t];\n\n\tconst alertEventTypeOptions: ComboboxOption[] = [\n\t\t{ value: 'policy.violation', label: 'Policy Violation' },\n\t\t{ value: 'schedule.failed', label: 'Schedule Failed' },\n\t\t{ value: 'schedule.slow', label: 'Schedule Slow' },\n\t\t{ value: '*', label: 'All Events' },\n\t];\n\n\tconst alertSeverityOptions: ComboboxOption[] = [\n\t\t{ value: 'info', label: 'Info' },\n\t\t{ value: 'warn', label: 'Warning' },\n\t\t{ value: 'error', label: 'Error' },\n\t\t{ value: 'critical', label: 'Critical' },\n\t];\n\n\tconst routeDeliveryModeOptions: ComboboxOption[] = [\n\t\t{ value: 'immediate', label: 'Immediate' },\n\t\t{ value: 'digest', label: 'Digest' },\n\t];\n\n\tconst auditLimitOptions: ComboboxOption[] = [\n\t\t{ value: '50', label: '50 entries' },\n\t\t{ value: '100', label: '100 entries' },\n\t\t{ value: '500', label: '500 entries' },\n\t];\n\tconst auditTimeRangeOptions: ComboboxOption[] = [\n\t\t{ value: '', label: 'All time', keywords: 'all' },\n\t\t{ value: '15m', label: '15 min' },\n\t\t{ value: '1h', label: '1 hour' },\n\t\t{ value: '6h', label: '6 hours' 
},\n\t\t{ value: '24h', label: '24 hours' },\n\t\t{ value: '7d', label: '7 days' },\n\t\t{ value: '30d', label: '30 days' },\n\t];\n\tconst auditActionOptions = $derived.by((): ComboboxOption[] => {\n\t\tconst values = new Set<string>();\n\t\tif (auditAction.trim()) values.add(auditAction);\n\t\tfor (const log of auditLogs) {\n\t\t\tif (log.action?.trim()) values.add(log.action);\n\t\t}\n\t\tconst sorted = Array.from(values).sort((a, b) => a.localeCompare(b));\n\t\treturn [{ value: '', label: 'All actions', keywords: 'all' }, ...sorted.map((value) => ({ value, label: value }))];\n\t});\n\tconst auditUsernameOptions = $derived.by((): ComboboxOption[] => {\n\t\tconst values = new Set<string>();\n\t\tif (auditUsername.trim()) values.add(auditUsername);\n\t\tfor (const log of auditLogs) {\n\t\t\tif (log.username?.trim()) values.add(log.username);\n\t\t}\n\t\tconst sorted = Array.from(values).sort((a, b) => a.localeCompare(b));\n\t\treturn [{ value: '', label: 'All users', keywords: 'all' }, ...sorted.map((value) => ({ value, label: value }))];\n\t});\n\n\t// Helper functions\n\tfunction formatBytes(bytes: number): string {\n\t\tif (bytes === 0) return '0 B';\n\t\tconst k = 1024;\n\t\tconst sizes = ['B', 'KB', 'MB', 'GB', 'TB'];\n\t\tconst i = Math.floor(Math.log(bytes) / Math.log(k));\n\t\treturn Math.round(bytes / Math.pow(k, i) * 100) / 100 + ' ' + sizes[i];\n\t}\n\n\tfunction formatTime(ts: string): string {\n\t\tif (!ts) return '-';\n\t\tconst date = new Date(ts);\n\t\treturn date.toLocaleString();\n\t}\n\n\tfunction truncate(s: string, max = 80): string {\n\t\tif (!s) return '';\n\t\treturn s.length > max ? s.substring(0, max) + '...' : s;\n\t}\n\n\tfunction safeLower(v: string | null | undefined): string {\n\t\treturn (v ?? 
'').toLowerCase();\n\t}\n\n\tfunction formatDefaultRoles(raw: string | null | undefined): string {\n\t\tif (!raw) return '-';\n\t\tif (raw === 'ALL') return 'ALL';\n\t\tconst trimmed = raw.trim();\n\t\tif (!trimmed) return '-';\n\n\t\ttry {\n\t\t\tif (trimmed.startsWith('[') && trimmed.endsWith(']')) {\n\t\t\t\tconst parsed = JSON.parse(trimmed);\n\t\t\t\tif (Array.isArray(parsed)) {\n\t\t\t\t\treturn parsed.filter(Boolean).join(', ') || '-';\n\t\t\t\t}\n\t\t\t}\n\t\t} catch {\n\t\t\t// fall through\n\t\t}\n\n\t\treturn trimmed;\n\t}\n\n\tfunction toDayKey(ts: string): string {\n\t\tconst d = new Date(ts);\n\t\tif (Number.isNaN(d.getTime())) return '';\n\t\tconst y = d.getUTCFullYear();\n\t\tconst m = String(d.getUTCMonth() + 1).padStart(2, '0');\n\t\tconst day = String(d.getUTCDate()).padStart(2, '0');\n\t\treturn `${y}-${m}-${day}`;\n\t}\n\n\tfunction buildRecentDailySeries(dates: string[], days = 7): { x: number[]; y: number[] } {\n\t\tconst now = new Date();\n\t\tconst start = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate()));\n\t\tstart.setUTCDate(start.getUTCDate() - (days - 1));\n\n\t\tconst keys: string[] = [];\n\t\tconst x: number[] = [];\n\t\tconst y: number[] = [];\n\t\tfor (let i = 0; i < days; i++) {\n\t\t\tconst day = new Date(start);\n\t\t\tday.setUTCDate(start.getUTCDate() + i);\n\t\t\tconst key = `${day.getUTCFullYear()}-${String(day.getUTCMonth() + 1).padStart(2, '0')}-${String(day.getUTCDate()).padStart(2, '0')}`;\n\t\t\tkeys.push(key);\n\t\t\tx.push(i + 1);\n\t\t\ty.push(0);\n\t\t}\n\t\tconst idx = new Map(keys.map((k, i) => [k, i]));\n\t\tfor (const ts of dates) {\n\t\t\tconst key = toDayKey(ts);\n\t\t\tconst pos = idx.get(key);\n\t\t\tif (pos !== undefined) y[pos] += 1;\n\t\t}\n\t\treturn { x, y };\n\t}\n\n\tfunction syncStatusLabel(status: SyncState['status']): string {\n\t\tif (status === 'idle') return 'synced';\n\t\treturn status;\n\t}\n\n\tfunction overPermissionSeverity(op: OverPermission): 'critical' | 'warn' 
| 'info' {\n\t\tif (!op.last_query_time) return 'critical';\n\t\tconst days = op.days_since_query ?? 0;\n\t\tif (days >= 90) return 'critical';\n\t\tif (days >= 30) return 'warn';\n\t\treturn 'info';\n\t}\n\n\tconst governanceTabIds: GovernanceTab[] = ['dashboard', 'tables', 'queries', 'lineage', 'viewgraph', 'access', 'incidents', 'policies', 'querylog', 'alerts', 'auditlog', 'settings'];\n\n\tfunction normalizeGovernanceTab(value: string | null | undefined): GovernanceTab {\n\t\tconst raw = (value ?? '').trim().toLowerCase();\n\t\tif (raw === 'query-audit' || raw === 'queryaudit') return 'queries';\n\t\tif ((governanceTabIds as string[]).includes(raw)) return raw as GovernanceTab;\n\t\treturn 'dashboard';\n\t}\n\n\tfunction syncGovernanceTabParam(tab: GovernanceTab) {\n\t\tif (typeof window === 'undefined') return;\n\t\tconst url = new URL(window.location.href);\n\t\tif (url.searchParams.get('tab') === tab) return;\n\t\turl.searchParams.set('tab', tab);\n\t\thistory.replaceState(null, '', `${url.pathname}?${url.searchParams.toString()}`);\n\t}\n\n\tfunction overPermissionSeverityPriority(severity: 'critical' | 'warn' | 'info'): number {\n\t\tif (severity === 'critical') return 3;\n\t\tif (severity === 'warn') return 2;\n\t\treturn 1;\n\t}\n\n\tfunction overPermissionPanelTone(severity: 'critical' | 'warn' | 'info'): string {\n\t\tif (severity === 'critical') return 'bg-red-50 border-red-200 dark:bg-red-900/20 dark:border-red-800';\n\t\tif (severity === 'warn') return 'bg-yellow-50 border-yellow-200 dark:bg-yellow-900/20 dark:border-yellow-800';\n\t\treturn 'bg-blue-50 border-blue-200 dark:bg-blue-900/20 dark:border-blue-800';\n\t}\n\n\tfunction overPermissionBadgeTone(severity: 'critical' | 'warn' | 'info'): string {\n\t\tif (severity === 'critical') return 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200';\n\t\tif (severity === 'warn') return 'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200';\n\t\treturn 'bg-blue-100 text-blue-800 
dark:bg-blue-900 dark:text-blue-200';\n\t}\n\n\tfunction toggleOverPermissionGroup(userName: string) {\n\t\tconst current = expandedOverPermissionUsers[userName] ?? false;\n\t\texpandedOverPermissionUsers = { ...expandedOverPermissionUsers, [userName]: !current };\n\t}\n\n\tfunction switchTab(tab: GovernanceTab, syncUrl = true) {\n\t\tactiveTab = tab;\n\t\tif (syncUrl) syncGovernanceTabParam(tab);\n\t\t// Load data for the new tab\n\t\tif (tab === 'dashboard') {\n\t\t\tloadDashboard();\n\t\t} else if (tab === 'tables') {\n\t\t\tloadTables();\n\t\t} else if (tab === 'queries') {\n\t\t\tloadQueries();\n\t\t} else if (tab === 'lineage') {\n\t\t\tloadLineage();\n\t\t} else if (tab === 'viewgraph') {\n\t\t\tloadViewGraph();\n\t\t} else if (tab === 'access') {\n\t\t\tloadAccess();\n\t\t} else if (tab === 'incidents') {\n\t\t\tloadIncidents();\n\t\t} else if (tab === 'policies') {\n\t\t\tloadPolicies();\n\t\t} else if (tab === 'querylog') {\n\t\t\tloadQueryLog();\n\t\t} else if (tab === 'alerts') {\n\t\t\tloadAlertsAdmin();\n\t\t} else if (tab === 'auditlog') {\n\t\t\tloadAuditLogs();\n\t\t} else if (tab === 'settings') {\n\t\t\tloadGovernanceSettings();\n\t\t}\n\t}\n\n\tasync function loadGovernanceSettings() {\n\t\tgovSettingsLoading = true;\n\t\ttry {\n\t\t\tgovSettings = await fetchGovernanceSettings();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load governance settings: ' + err.message);\n\t\t} finally {\n\t\t\tgovSettingsLoading = false;\n\t\t}\n\t}\n\n\tasync function persistGovernanceSettings(payload: { sync_enabled?: boolean; banner_dismissed?: boolean }) {\n\t\tgovToggleSaving = true;\n\t\ttry {\n\t\t\tgovSettings = await updateGovernanceSettings(payload);\n\t\t\tif (payload.sync_enabled === true) toastSuccess('Governance sync enabled');\n\t\t\tif (payload.sync_enabled === false) toastSuccess('Governance sync disabled');\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to update governance settings: ' + err.message);\n\t\t} finally 
{\n\t\t\tgovToggleSaving = false;\n\t\t}\n\t}\n\n\tasync function dismissGovernanceUpgradeBanner() {\n\t\tawait persistGovernanceSettings({ banner_dismissed: true });\n\t}\n\n\tasync function confirmEnableGovernanceSync() {\n\t\tshowEnableConfirm = false;\n\t\tawait persistGovernanceSettings({ sync_enabled: true });\n\t}\n\n\tasync function confirmDisableGovernanceSync() {\n\t\tshowDisableConfirm = false;\n\t\tawait persistGovernanceSettings({ sync_enabled: false });\n\t}\n\n\tasync function loadDashboard() {\n\t\tloading = true;\n\t\ttry {\n\t\t\toverview = await fetchOverview();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load dashboard: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadTables() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst [dbsRes, tablesRes] = await Promise.all([\n\t\t\t\tfetchDatabases(),\n\t\t\t\tfetchTables()\n\t\t\t]);\n\t\t\tdatabases = dbsRes?.databases ?? [];\n\t\t\ttables = tablesRes?.tables ?? [];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load tables: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadQueries() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst [logRes, topRes] = await Promise.all([\n\t\t\t\tfetchQueryLog({ user: queryUserFilter || undefined, limit: queryLimit }),\n\t\t\t\tfetchTopQueries(10)\n\t\t\t]);\n\t\t\tqueryLog = logRes?.entries ?? [];\n\t\t\ttopQueries = topRes?.queries ?? [];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load query audit: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadLineage() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst res = await fetchLineageGraph(true);\n\t\t\tlineageGraph = res ?? null;\n\t\t\tlineageEdges = res?.edges ?? 
[];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load lineage: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadViewGraph() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst res = await fetchViewGraph();\n\t\t\tviewGraphData = res ?? null;\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load view graph: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadAccess() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst [usersRes, rolesRes, matrixRes, overPermsRes] = await Promise.all([\n\t\t\t\tfetchAccessUsers(),\n\t\t\t\tfetchAccessRoles(),\n\t\t\t\tfetchAccessMatrix(),\n\t\t\t\tfetchOverPermissions()\n\t\t\t]);\n\t\t\tusers = usersRes?.users ?? [];\n\t\t\troles = rolesRes?.roles ?? [];\n\t\t\taccessMatrix = matrixRes?.matrix ?? [];\n\t\t\toverPermissions = overPermsRes?.over_permissions ?? [];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load access data: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadPolicies() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst [policiesRes, violationsRes] = await Promise.all([\n\t\t\t\tfetchPolicies(),\n\t\t\t\tfetchViolations()\n\t\t\t]);\n\t\t\tpolicies = policiesRes?.policies ?? [];\n\t\t\tviolations = violationsRes?.violations ?? [];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load policies: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function loadIncidents() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tconst res = await fetchIncidents({\n\t\t\t\tstatus: incidentStatusFilter || undefined,\n\t\t\t\tseverity: incidentSeverityFilter || undefined,\n\t\t\t\tlimit: 200\n\t\t\t});\n\t\t\tincidents = res?.incidents ?? 
[];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load incidents: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tasync function handleSyncNow() {\n\t\tif (govSettings && !govSettings.sync_enabled) {\n\t\t\ttoastError('Governance sync is disabled. Enable it in the Settings tab first.');\n\t\t\tswitchTab('settings');\n\t\t\treturn;\n\t\t}\n\t\tsyncing = true;\n\t\ttry {\n\t\t\tawait triggerSync();\n\t\t\ttoastSuccess('Sync started successfully');\n\t\t\t// Reload dashboard after a delay\n\t\t\tsetTimeout(() => {\n\t\t\t\tloadDashboard();\n\t\t\t}, 2000);\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to start sync: ' + err.message);\n\t\t} finally {\n\t\t\tsyncing = false;\n\t\t}\n\t}\n\n\tasync function openTableDetails(table: GovTable) {\n\t\tselectedTable = table;\n\t\tselectedTableColumns = [];\n\t\ttableNotes = [];\n\t\ttableNoteDraft = '';\n\t\ttableDetailSheetOpen = true;\n\t\ttableDetailLoading = true;\n\t\ttry {\n\t\t\tconst detail = await fetchTableDetail(table.database_name, table.table_name);\n\t\t\tselectedTableColumns = detail.columns ?? [];\n\t\t\tconst notesRes = await fetchTableNotes(table.database_name, table.table_name);\n\t\t\ttableNotes = notesRes?.notes ?? [];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load table details: ' + err.message);\n\t\t} finally {\n\t\t\ttableDetailLoading = false;\n\t\t}\n\t}\n\n\tfunction closeTableDetails() {\n\t\ttableDetailSheetOpen = false;\n\t\tselectedTable = null;\n\t\tselectedTableColumns = [];\n\t\ttableNotes = [];\n\t\ttableNoteDraft = '';\n\t}\n\n\tasync function addTableNote() {\n\t\tif (!selectedTable) return;\n\t\tconst comment = tableNoteDraft.trim();\n\t\tif (!comment) return;\n\t\ttry {\n\t\t\tawait apiCreateTableNote(selectedTable.database_name, selectedTable.table_name, comment);\n\t\t\ttableNoteDraft = '';\n\t\t\tconst notesRes = await fetchTableNotes(selectedTable.database_name, selectedTable.table_name);\n\t\t\ttableNotes = notesRes?.notes ?? 
[];\n\t\t\ttoastSuccess('Table note added');\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to add table note: ' + err.message);\n\t\t}\n\t}\n\n\tfunction deleteTableNote(noteId: string) {\n\t\tdeletingNoteId = noteId;\n\t}\n\n\tasync function confirmDeleteNote() {\n\t\tif (!deletingNoteId || !selectedTable) return;\n\t\tconst noteId = deletingNoteId;\n\t\tdeletingNoteId = null;\n\t\ttry {\n\t\t\tawait apiDeleteObjectNote(noteId);\n\t\t\tconst notesRes = await fetchTableNotes(selectedTable.database_name, selectedTable.table_name);\n\t\t\ttableNotes = notesRes?.notes ?? [];\n\t\t\ttoastSuccess('Note deleted');\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to delete note: ' + err.message);\n\t\t}\n\t}\n\n\tfunction openQueryDetails(entry: QueryLogEntry) {\n\t\tselectedQuery = entry;\n\t\tqueryDetailSheetOpen = true;\n\t}\n\n\tfunction closeQueryDetails() {\n\t\tqueryDetailSheetOpen = false;\n\t\tselectedQuery = null;\n\t}\n\n\tfunction openAccessDetails(alert: OverPermission) {\n\t\tselectedOverPermissionGroup = null;\n\t\tselectedOverPermission = alert;\n\t\taccessDetailSheetOpen = true;\n\t}\n\n\tfunction openAccessGroupDetails(group: OverPermissionGroup) {\n\t\tselectedOverPermission = null;\n\t\tselectedOverPermissionGroup = group;\n\t\taccessDetailSheetOpen = true;\n\t}\n\n\tfunction closeAccessDetails() {\n\t\taccessDetailSheetOpen = false;\n\t\tselectedOverPermission = null;\n\t\tselectedOverPermissionGroup = null;\n\t}\n\n\tfunction openIncidentDetails(incident: GovernanceIncident) {\n\t\tselectedIncident = incident;\n\t\tincidentComments = [];\n\t\tincidentCommentDraft = '';\n\t\tincidentDetailSheetOpen = true;\n\t\tvoid loadIncidentDetail(incident.id);\n\t}\n\n\tasync function loadIncidentDetail(incidentID: string) {\n\t\ttry {\n\t\t\tconst [incidentRes, commentsRes] = await Promise.all([\n\t\t\t\tapiGetIncident(incidentID),\n\t\t\t\tfetchIncidentComments(incidentID)\n\t\t\t]);\n\t\t\tselectedIncident = incidentRes?.incident ?? 
selectedIncident;\n\t\t\tincidentComments = commentsRes?.comments ?? [];\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to load incident detail: ' + err.message);\n\t\t}\n\t}\n\n\tfunction closeIncidentDetails() {\n\t\tincidentDetailSheetOpen = false;\n\t\tselectedIncident = null;\n\t\tincidentComments = [];\n\t\tincidentCommentDraft = '';\n\t}\n\n\tasync function createIncidentFromViolation(violation: PolicyViolation) {\n\t\ttry {\n\t\t\tconst res = await promoteViolationToIncident(violation.id);\n\t\t\ttoastSuccess(res?.created ? 'Incident created from violation' : 'Existing incident updated');\n\t\t\tawait loadIncidents();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to promote violation: ' + err.message);\n\t\t}\n\t}\n\n\tasync function createManualIncident() {\n\t\tconst title = incidentForm.title.trim();\n\t\tif (!title) {\n\t\t\ttoastError('Incident title is required');\n\t\t\treturn;\n\t\t}\n\t\ttry {\n\t\t\tawait apiCreateIncident({\n\t\t\t\ttitle,\n\t\t\t\tseverity: incidentForm.severity,\n\t\t\t\tstatus: incidentForm.status,\n\t\t\t\tassignee: incidentForm.assignee.trim() || undefined,\n\t\t\t\tdetails: incidentForm.details.trim() || undefined,\n\t\t\t});\n\t\t\tincidentCreateSheetOpen = false;\n\t\t\tincidentForm = {\n\t\t\t\ttitle: '',\n\t\t\t\tseverity: 'warn',\n\t\t\t\tstatus: 'open',\n\t\t\t\tassignee: '',\n\t\t\t\tdetails: ''\n\t\t\t};\n\t\t\ttoastSuccess('Incident created');\n\t\t\tawait loadIncidents();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to create incident: ' + err.message);\n\t\t}\n\t}\n\n\tasync function saveIncidentUpdates() {\n\t\tif (!selectedIncident) return;\n\t\ttry {\n\t\t\tawait apiUpdateIncident(selectedIncident.id, {\n\t\t\t\ttitle: selectedIncident.title,\n\t\t\t\tseverity: selectedIncident.severity,\n\t\t\t\tstatus: selectedIncident.status,\n\t\t\t\tassignee: selectedIncident.assignee ?? '',\n\t\t\t\tdetails: selectedIncident.details ?? '',\n\t\t\t\tresolution_note: selectedIncident.resolution_note ?? 
'',\n\t\t\t});\n\t\t\ttoastSuccess('Incident updated');\n\t\t\tawait loadIncidentDetail(selectedIncident.id);\n\t\t\tawait loadIncidents();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to update incident: ' + err.message);\n\t\t}\n\t}\n\n\tasync function addIncidentComment() {\n\t\tif (!selectedIncident) return;\n\t\tconst comment = incidentCommentDraft.trim();\n\t\tif (!comment) return;\n\t\ttry {\n\t\t\tawait apiCreateIncidentComment(selectedIncident.id, comment);\n\t\t\tincidentCommentDraft = '';\n\t\t\tawait loadIncidentDetail(selectedIncident.id);\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to add incident comment: ' + err.message);\n\t\t}\n\t}\n\n\tfunction openPolicyForm(policy?: Policy) {\n\t\tif (policy) {\n\t\t\teditingPolicy = policy;\n\t\t\tpolicyForm = {\n\t\t\t\tname: policy.name,\n\t\t\t\tdescription: policy.description ?? '',\n\t\t\t\tobject_type: policy.object_type,\n\t\t\t\tobject_database: policy.object_database ?? '',\n\t\t\t\tobject_table: policy.object_table ?? '',\n\t\t\t\tobject_column: policy.object_column ?? '',\n\t\t\t\trequired_role: policy.required_role,\n\t\t\t\tseverity: policy.severity,\n\t\t\t\tenforcement_mode: policy.enforcement_mode ?? 
'warn',\n\t\t\t\tenabled: policy.enabled\n\t\t\t};\n\t\t} else {\n\t\t\teditingPolicy = null;\n\t\t\tpolicyForm = {\n\t\t\t\tname: '',\n\t\t\t\tdescription: '',\n\t\t\t\tobject_type: 'table',\n\t\t\t\tobject_database: '',\n\t\t\t\tobject_table: '',\n\t\t\t\tobject_column: '',\n\t\t\t\trequired_role: '',\n\t\t\t\tseverity: 'warn',\n\t\t\t\tenforcement_mode: 'warn',\n\t\t\t\tenabled: true\n\t\t\t};\n\t\t}\n\t\tshowPolicyForm = true;\n\t}\n\n\tfunction closePolicyForm() {\n\t\tshowPolicyForm = false;\n\t\teditingPolicy = null;\n\t}\n\n\tasync function handlePolicySubmit() {\n\t\tloading = true;\n\t\ttry {\n\t\t\tif (editingPolicy) {\n\t\t\t\tawait apiUpdatePolicy(editingPolicy.id, policyForm);\n\t\t\t\ttoastSuccess('Policy updated successfully');\n\t\t\t} else {\n\t\t\t\tawait apiCreatePolicy(policyForm);\n\t\t\t\ttoastSuccess('Policy created successfully');\n\t\t\t}\n\t\t\tclosePolicyForm();\n\t\t\tawait loadPolicies();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to save policy: ' + err.message);\n\t\t} finally {\n\t\t\tloading = false;\n\t\t}\n\t}\n\n\tfunction requestDeletePolicy(policy: Policy) {\n\t\tpendingPolicyDelete = policy;\n\t\tconfirmPolicyDeleteOpen = true;\n\t}\n\n\tfunction cancelDeletePolicy() {\n\t\tconfirmPolicyDeleteOpen = false;\n\t\tpendingPolicyDelete = null;\n\t}\n\n\tasync function confirmDeletePolicy() {\n\t\tif (!pendingPolicyDelete) return;\n\t\tconfirmPolicyDeleteLoading = true;\n\t\ttry {\n\t\t\tawait apiDeletePolicy(pendingPolicyDelete.id);\n\t\t\ttoastSuccess('Policy deleted successfully');\n\t\t\tcancelDeletePolicy();\n\t\t\tawait loadPolicies();\n\t\t} catch (err: any) {\n\t\t\ttoastError('Failed to delete policy: ' + err.message);\n\t\t} finally {\n\t\t\tconfirmPolicyDeleteLoading = false;\n\t\t}\n\t}\n\n\t// ── ClickHouse Query Log ─────────────────────────────────\n\tasync function loadQueryLog() {\n\t\tqueryLogLoading = true;\n\t\ttry {\n\t\t\tconst params = new URLSearchParams();\n\t\t\tif (qlTimeRange) 
params.set('timeRange', qlTimeRange);\n\t\t\tif (qlSearch.trim()) params.set('search', qlSearch.trim());\n\t\t\tif (qlQueryKind) params.set('queryKind', qlQueryKind);\n\t\t\tif (qlStatus) params.set('status', qlStatus);\n\t\t\tparams.set('limit', String(qlLimit));\n\t\t\tparams.set('offset', String(qlOffset));\n\t\t\tconst res = await apiGet<{ data: any[]; meta: any[] }>(`/api/governance/clickhouse-query-log?${params}`);\n\t\t\tqueryLogData = res.data ?? [];\n\t\t\tqueryLogMeta = res.meta ?? [];\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t} finally {\n\t\t\tqueryLogLoading = false;\n\t\t}\n\t}\n\n\t// ── Alerts admin ─────────────────────────────────────────\n\tasync function loadAlertsAdmin() {\n\t\talertsLoading = true;\n\t\ttry {\n\t\t\tconst [channels, rules, events] = await Promise.all([\n\t\t\t\tadminListAlertChannels(),\n\t\t\t\tadminListAlertRules(),\n\t\t\t\tadminListAlertEvents({ limit: alertEventLimit }),\n\t\t\t]);\n\t\t\talertChannels = channels;\n\t\t\talertRules = rules;\n\t\t\talertEvents = events;\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t} finally {\n\t\t\talertsLoading = false;\n\t\t}\n\t}\n\n\tasync function createAlertChannelRecord() {\n\t\tconst config: Record<string, unknown> = {};\n\t\tif (channelForm.channel_type === 'smtp') {\n\t\t\tconfig.smtp_host = channelForm.smtp_host;\n\t\t\tconfig.smtp_port = channelForm.smtp_port;\n\t\t\tconfig.smtp_username = channelForm.smtp_username;\n\t\t\tconfig.smtp_password = channelForm.smtp_password;\n\t\t\tconfig.from_email = channelForm.smtp_from_email;\n\t\t\tconfig.from_name = channelForm.smtp_from_name;\n\t\t\tconfig.use_tls = channelForm.smtp_use_tls;\n\t\t\tconfig.starttls = channelForm.smtp_starttls;\n\t\t} else {\n\t\t\tconfig.api_key = channelForm.api_key;\n\t\t\tconfig.from_email = channelForm.api_from_email;\n\t\t\tconfig.from_name = channelForm.api_from_name;\n\t\t\tif (channelForm.api_base_url) config.base_url = channelForm.api_base_url;\n\t\t}\n\t\ttry 
{\n\t\t\tawait adminCreateAlertChannel({\n\t\t\t\tname: channelForm.name,\n\t\t\t\tchannel_type: channelForm.channel_type,\n\t\t\t\tis_active: channelForm.is_active,\n\t\t\t\tconfig,\n\t\t\t});\n\t\t\ttoastSuccess('Alert channel created');\n\t\t\tchannelSheetOpen = false;\n\t\t\tchannelForm = { ...channelForm, name: '', smtp_host: '', smtp_port: 587, smtp_username: '', smtp_password: '', smtp_from_email: '', smtp_from_name: '', smtp_use_tls: true, smtp_starttls: false, api_key: '', api_from_email: '', api_from_name: '', api_base_url: '' };\n\t\t\tawait loadAlertsAdmin();\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tasync function toggleAlertChannel(channel: AlertChannel, isActive: boolean) {\n\t\ttry {\n\t\t\tawait adminUpdateAlertChannel(channel.id, { is_active: isActive });\n\t\t\ttoastSuccess(`Channel \"${channel.name}\" ${isActive ? 'activated' : 'deactivated'}`);\n\t\t\tawait loadAlertsAdmin();\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tfunction deleteAlertChannelRecord(channel: AlertChannel) {\n\t\tdeletingChannel = channel;\n\t}\n\n\tasync function confirmDeleteChannel() {\n\t\tif (!deletingChannel) return;\n\t\tconst channel = deletingChannel;\n\t\tdeletingChannel = null;\n\t\ttry {\n\t\t\tawait adminDeleteAlertChannel(channel.id);\n\t\t\ttoastSuccess('Channel deleted');\n\t\t\tawait loadAlertsAdmin();\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tasync function testAlertChannelRecord(channel: AlertChannel) {\n\t\tconst recipients = alertTestRecipients.split(',').map((r) => r.trim()).filter(Boolean);\n\t\tif (recipients.length === 0) {\n\t\t\ttoastError('Enter at least one test recipient');\n\t\t\treturn;\n\t\t}\n\t\ttry {\n\t\t\tawait adminTestAlertChannel(channel.id, { recipients });\n\t\t\ttoastSuccess('Test alert sent');\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tasync function createAlertRuleRecord() {\n\t\tconst routes: AlertRuleRoutePayload[] = 
ruleRoutesDraft.map((r) => ({\n\t\t\tchannel_id: r.channel_id,\n\t\t\trecipients: r.recipients.split(',').map((s) => s.trim()).filter(Boolean),\n\t\t\tis_active: r.is_active,\n\t\t\tdelivery_mode: r.delivery_mode,\n\t\t\tdigest_window_minutes: r.delivery_mode === 'digest' ? r.digest_window_minutes : undefined,\n\t\t\tescalation_channel_id: r.escalation_channel_id || undefined,\n\t\t\tescalation_recipients: r.escalation_recipients ? r.escalation_recipients.split(',').map((s) => s.trim()).filter(Boolean) : undefined,\n\t\t\tescalation_after_failures: r.escalation_after_failures || undefined,\n\t\t}));\n\t\ttry {\n\t\t\tawait adminCreateAlertRule({\n\t\t\t\tname: ruleForm.name,\n\t\t\t\tevent_type: ruleForm.event_type,\n\t\t\t\tseverity_min: ruleForm.severity_min,\n\t\t\t\tenabled: ruleForm.enabled,\n\t\t\t\tcooldown_seconds: ruleForm.cooldown_seconds,\n\t\t\t\tmax_attempts: ruleForm.max_attempts,\n\t\t\t\tsubject_template: ruleForm.subject_template || undefined,\n\t\t\t\tbody_template: ruleForm.body_template || undefined,\n\t\t\t\troutes,\n\t\t\t});\n\t\t\ttoastSuccess('Alert rule created');\n\t\t\truleSheetOpen = false;\n\t\t\truleForm = { ...ruleForm, name: '' };\n\t\t\truleRoutesDraft = [{ channel_id: '', recipients: '', is_active: true, delivery_mode: 'immediate', digest_window_minutes: 15, escalation_channel_id: '', escalation_recipients: '', escalation_after_failures: 0 }];\n\t\t\tawait loadAlertsAdmin();\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tasync function toggleAlertRule(rule: AlertRule, enabled: boolean) {\n\t\ttry {\n\t\t\tawait adminUpdateAlertRule(rule.id, { enabled });\n\t\t\ttoastSuccess(`Rule \"${rule.name}\" ${enabled ? 
'enabled' : 'disabled'}`);\n\t\t\tawait loadAlertsAdmin();\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tfunction deleteAlertRuleRecord(rule: AlertRule) {\n\t\tdeletingRule = rule;\n\t}\n\n\tasync function confirmDeleteRule() {\n\t\tif (!deletingRule) return;\n\t\tconst rule = deletingRule;\n\t\tdeletingRule = null;\n\t\ttry {\n\t\t\tawait adminDeleteAlertRule(rule.id);\n\t\t\ttoastSuccess('Rule deleted');\n\t\t\tawait loadAlertsAdmin();\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t}\n\t}\n\n\tfunction alertChannelOptions(): ComboboxOption[] {\n\t\treturn alertChannels.map((ch) => ({ value: ch.id, label: `${ch.name} (${ch.channel_type})` }));\n\t}\n\n\tfunction addRuleRouteDraft() {\n\t\truleRoutesDraft = [...ruleRoutesDraft, { channel_id: '', recipients: '', is_active: true, delivery_mode: 'immediate', digest_window_minutes: 15, escalation_channel_id: '', escalation_recipients: '', escalation_after_failures: 0 }];\n\t}\n\n\tfunction removeRuleRouteDraft(idx: number) {\n\t\truleRoutesDraft = ruleRoutesDraft.filter((_, i) => i !== idx);\n\t}\n\n\tfunction updateRuleRouteDraft(idx: number, patch: Partial<RuleRouteDraft>) {\n\t\truleRoutesDraft = ruleRoutesDraft.map((r, i) => (i === idx ? { ...r, ...patch } : r));\n\t}\n\n\t// ── Audit Log ────────────────────────────────────────────\n\tasync function loadAuditLogs() {\n\t\tauditLoading = true;\n\t\ttry {\n\t\t\tconst params = new URLSearchParams();\n\t\t\tparams.set('limit', String(auditLimit));\n\t\t\tif (auditTimeRange) params.set('timeRange', auditTimeRange);\n\t\t\tif (auditAction) params.set('action', auditAction);\n\t\t\tif (auditUsername) params.set('username', auditUsername);\n\t\t\tif (auditSearch.trim()) params.set('search', auditSearch.trim());\n\t\t\tconst res = await apiGet<AuditLog[]>(`/api/governance/audit-logs?${params}`);\n\t\t\tauditLogs = res ?? 
[];\n\t\t} catch (e: any) {\n\t\t\ttoastError(e.message);\n\t\t} finally {\n\t\t\tauditLoading = false;\n\t\t}\n\t}\n\n\t// Computed values\n\tlet filteredTables = $derived(\n\t\ttables.filter((t) => {\n\t\t\tconst matchesDb = !selectedDatabase || t.database_name === selectedDatabase;\n\t\t\tconst matchesSearch =\n\t\t\t\t!tableSearchQuery ||\n\t\t\t\tt.table_name.toLowerCase().includes(tableSearchQuery.toLowerCase()) ||\n\t\t\t\tt.database_name.toLowerCase().includes(tableSearchQuery.toLowerCase());\n\t\t\treturn matchesDb && matchesSearch;\n\t\t})\n\t);\n\n\tlet filteredAccessMatrix = $derived(\n\t\taccessMatrix.filter((entry) => {\n\t\t\tconst matchesUser = !accessUserFilter || safeLower(entry.user_name).includes(accessUserFilter.toLowerCase());\n\t\t\tconst matchesDb = !accessDatabaseFilter || safeLower(entry.database_name).includes(accessDatabaseFilter.toLowerCase());\n\t\t\treturn matchesUser && matchesDb;\n\t\t})\n\t);\n\n\tlet dashboardViolationTrend = $derived.by(() => {\n\t\tconst dates = (overview?.recent_violations ?? []).map((v) => v.detected_at).filter(Boolean);\n\t\treturn buildRecentDailySeries(dates, 7);\n\t});\n\n\tlet dashboardSchemaTrend = $derived.by(() => {\n\t\tconst dates = (overview?.recent_changes ?? 
[]).map((c) => c.detected_at).filter(Boolean);\n\t\treturn buildRecentDailySeries(dates, 7);\n\t});\n\n\tlet groupedOverPermissions = $derived.by<OverPermissionGroup[]>(() => {\n\t\tconst byUser = new Map<string, OverPermission[]>();\n\n\t\tfor (const alert of overPermissions) {\n\t\t\tconst key = alert.user_name || '(unknown user)';\n\t\t\tconst bucket = byUser.get(key);\n\t\t\tif (bucket) {\n\t\t\t\tbucket.push(alert);\n\t\t\t} else {\n\t\t\t\tbyUser.set(key, [alert]);\n\t\t\t}\n\t\t}\n\n\t\treturn Array.from(byUser.entries())\n\t\t\t.map(([userName, alerts]) => {\n\t\t\t\tconst sortedAlerts = [...alerts].sort((a, b) => {\n\t\t\t\t\tconst sevDelta =\n\t\t\t\t\t\toverPermissionSeverityPriority(overPermissionSeverity(b)) -\n\t\t\t\t\t\toverPermissionSeverityPriority(overPermissionSeverity(a));\n\t\t\t\t\tif (sevDelta !== 0) return sevDelta;\n\t\t\t\t\tconst dbDelta = (a.database_name || '*').localeCompare(b.database_name || '*');\n\t\t\t\t\tif (dbDelta !== 0) return dbDelta;\n\t\t\t\t\treturn a.privilege.localeCompare(b.privilege);\n\t\t\t\t});\n\n\t\t\t\tlet critical = 0;\n\t\t\t\tlet warn = 0;\n\t\t\t\tlet info = 0;\n\t\t\t\tfor (const alert of sortedAlerts) {\n\t\t\t\t\tconst severity = overPermissionSeverity(alert);\n\t\t\t\t\tif (severity === 'critical') critical++;\n\t\t\t\t\telse if (severity === 'warn') warn++;\n\t\t\t\t\telse info++;\n\t\t\t\t}\n\t\t\t\tconst topSeverity: 'critical' | 'warn' | 'info' =\n\t\t\t\t\tcritical > 0 ? 'critical' : warn > 0 ? 
'warn' : 'info';\n\n\t\t\t\treturn {\n\t\t\t\t\tuserName,\n\t\t\t\t\talerts: sortedAlerts,\n\t\t\t\t\ttotal: sortedAlerts.length,\n\t\t\t\t\tdatabases: new Set(sortedAlerts.map((alert) => alert.database_name || '*')).size,\n\t\t\t\t\tcritical,\n\t\t\t\t\twarn,\n\t\t\t\t\tinfo,\n\t\t\t\t\ttopSeverity\n\t\t\t\t};\n\t\t\t})\n\t\t\t.sort((a, b) => {\n\t\t\t\tconst sevDelta =\n\t\t\t\t\toverPermissionSeverityPriority(b.topSeverity) - overPermissionSeverityPriority(a.topSeverity);\n\t\t\t\tif (sevDelta !== 0) return sevDelta;\n\t\t\t\tif (a.total !== b.total) return b.total - a.total;\n\t\t\t\treturn a.userName.localeCompare(b.userName);\n\t\t\t});\n\t});\n\n\tonMount(() => {\n\t\tconst initialTab = normalizeGovernanceTab(\n\t\t\ttypeof window === 'undefined' ? null : new URLSearchParams(window.location.search).get('tab')\n\t\t);\n\t\tswitchTab(initialTab, true);\n\t\t// Load settings in the background so the upgrade banner + sync-state\n\t\t// indicator can render regardless of which tab the user lands on.\n\t\tloadGovernanceSettings();\n\t});\n</script>\n\n<div class=\"flex flex-col h-full\">\n\t{#if govSettings && !govSettings.sync_enabled && !govSettings.banner_dismissed}\n\t\t<div class=\"flex items-start gap-3 border-b border-blue-200 bg-blue-50 px-4 py-3 text-sm text-blue-900 dark:border-blue-900 dark:bg-blue-950/40 dark:text-blue-100\">\n\t\t\t<Info class=\"w-5 h-5 flex-shrink-0 mt-0.5\" />\n\t\t\t<div class=\"flex-1 min-w-0\">\n\t\t\t\t<p class=\"font-medium\">Governance background sync is now opt-in.</p>\n\t\t\t\t<p class=\"mt-0.5 text-blue-800 dark:text-blue-200/90\">\n\t\t\t\t\tYour existing data is preserved, but the syncer is paused until you enable it explicitly.\n\t\t\t\t\t<button\n\t\t\t\t\t\tclass=\"underline underline-offset-2 font-medium ml-1 hover:text-blue-950 dark:hover:text-white\"\n\t\t\t\t\t\tonclick={() => switchTab('settings')}\n\t\t\t\t\t>Review settings →</button>\n\t\t\t\t</p>\n\t\t\t</div>\n\t\t\t<button\n\t\t\t\tclass=\"flex-shrink-0 p-1 
rounded hover:bg-blue-100 dark:hover:bg-blue-900/50\"\n\t\t\t\taria-label=\"Dismiss banner\"\n\t\t\t\tonclick={dismissGovernanceUpgradeBanner}\n\t\t\t\tdisabled={govToggleSaving}\n\t\t\t>\n\t\t\t\t<X class=\"w-4 h-4\" />\n\t\t\t</button>\n\t\t</div>\n\t{/if}\n\t<div class=\"border-b border-gray-200 dark:border-gray-800\">\n\t\t<div class=\"flex flex-col gap-3 px-4 py-3 md:flex-row md:items-center md:justify-between\">\n\t\t\t<div class=\"flex min-w-0 flex-col gap-2 md:flex-row md:items-center md:gap-4\">\n\t\t\t\t<h1 class=\"ds-page-title\">Governance</h1>\n\t\t\t\t<nav class=\"ds-tabs border-0 px-0 pt-0 gap-1 overflow-x-auto whitespace-nowrap\" aria-label=\"Tabs\">\n\t\t\t\t\t{#each governanceTabs as tab}\n\t\t\t\t\t\t<button\n\t\t\t\t\t\t\tonclick={() => switchTab(tab.id)}\n\t\t\t\t\t\t\tclass={`ds-tab ${\n\t\t\t\t\t\t\t\tactiveTab === tab.id\n\t\t\t\t\t\t\t\t\t? 'ds-tab-active'\n\t\t\t\t\t\t\t\t\t: ''\n\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t>\n\t\t\t\t\t\t\t{tab.label}\n\t\t\t\t\t\t</button>\n\t\t\t\t\t{/each}\n\t\t\t\t</nav>\n\t\t\t</div>\n\t\t\t<div>\n\t\t\t\t<button\n\t\t\t\t\tonclick={handleSyncNow}\n\t\t\t\t\tdisabled={syncing}\n\t\t\t\t\tclass=\"ds-btn-primary px-3 py-1.5 text-sm disabled:opacity-50 disabled:cursor-not-allowed\"\n\t\t\t\t>\n\t\t\t\t\t<RefreshCw class={`w-4 h-4 ${syncing ? 'animate-spin' : ''}`} />\n\n\t\t\t\t</button>\n\t\t\t</div>\n\t\t</div>\n\t</div>\n\n\t<div class={`flex-1 overflow-auto p-4 ${activeTab === 'viewgraph' || activeTab === 'lineage' ? 'flex flex-col' : ''}`}>\n\t\t<div class={`${activeTab === 'viewgraph' || activeTab === 'lineage' ? 
'flex-1 flex flex-col min-h-0' : 'max-w-7xl mx-auto'}`}>\n\t\t\t{#if loading && !overview && !tables.length && !queryLog.length && !lineageEdges.length && !users.length && !policies.length}\n\t\t\t\t<div class=\"flex justify-center items-center py-12\">\n\t\t\t\t\t<Spinner size=\"lg\" />\n\t\t\t\t</div>\n\t\t\t{:else}\n\t\t\t\t\t<!-- Dashboard Tab -->\n\t\t\t\t\t{#if activeTab === 'dashboard'}\n\t\t\t\t\t\t{#if overview}\n\t\t\t\t\t\t\t<!-- Stats Cards -->\n\t\t\t\t\t\t\t<div class=\"grid grid-cols-2 lg:grid-cols-5 gap-3 mb-4\">\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Database class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.database_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Databases</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Table2 class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.table_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Tables</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Columns3 class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.column_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Columns</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Users 
class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.user_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Users</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Search class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.query_count_24h}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Queries (24h)</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<GitBranch class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.lineage_edge_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Lineage Edges</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Shield class=\"w-4 h-4 text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.policy_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Policies</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<AlertTriangle class=\"w-4 h-4 text-red-500\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.violation_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p 
class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Violations</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Siren class=\"w-4 h-4 text-orange-500\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.incident_count || 0}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Open Incidents</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<Table2 class=\"w-4 h-4 text-green-500\" />\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xl font-bold text-gray-900 dark:text-white\">{overview.tagged_table_count}</span>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-600 dark:text-gray-400 mt-1\">Tagged Tables</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Trends -->\n\t\t\t\t\t\t\t<div class=\"grid grid-cols-1 lg:grid-cols-2 gap-4 mb-6\">\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between mb-2\">\n\t\t\t\t\t\t\t\t\t\t<h3 class=\"text-xs font-semibold uppercase tracking-wide text-gray-600 dark:text-gray-300\">\n\t\t\t\t\t\t\t\t\t\t\tViolations Trend (7d)\n\t\t\t\t\t\t\t\t\t\t</h3>\n\t\t\t\t\t\t\t\t\t\t<HelpTip text=\"uPlot sparkline of policy violations detected over the last 7 days.\" />\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<MiniTrendChart x={dashboardViolationTrend.x} y={dashboardViolationTrend.y} color=\"#ef4444\" fill=\"rgba(239,68,68,0.18)\" height={110} />\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between mb-2\">\n\t\t\t\t\t\t\t\t\t\t<h3 class=\"text-xs font-semibold uppercase tracking-wide text-gray-600 
dark:text-gray-300\">\n\t\t\t\t\t\t\t\t\t\t\tSchema Change Trend (7d)\n\t\t\t\t\t\t\t\t\t\t</h3>\n\t\t\t\t\t\t\t\t\t\t<HelpTip text=\"uPlot sparkline of metadata/schema change events over the last 7 days.\" />\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<MiniTrendChart x={dashboardSchemaTrend.x} y={dashboardSchemaTrend.y} color=\"#10b981\" fill=\"rgba(16,185,129,0.18)\" height={110} />\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Sync Status -->\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-6 mb-8\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between mb-4\">\n\t\t\t\t\t\t\t\t\t\t<h2 class=\"text-lg font-semibold text-gray-900 dark:text-white\">Sync Status</h2>\n\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 dark:text-gray-400\">Status updates from governance sync workers</div>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"space-y-3\">\n\t\t\t\t\t\t\t\t\t\t{#each overview.sync_states ?? [] as syncState}\n\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-sm font-medium text-gray-700 dark:text-gray-300\">{syncState.sync_type}</span>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center space-x-2\">\n\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500 dark:text-gray-400\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t{syncState.last_synced_at ? formatTime(syncState.last_synced_at) : 'Never'}\n\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsyncState.status === 'idle'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: syncState.status === 'running'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 
'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: syncState.status === 'error'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{syncStatusLabel(syncState.status)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Recent Changes and Violations -->\n\t\t\t\t\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-6\">\n\t\t\t\t\t\t\t\t<!-- Recent Schema Changes -->\n\t\t\t\t\t\t\t\t<div class=\"ds-panel p-6\">\n\t\t\t\t\t\t\t\t\t<h2 class=\"text-lg font-semibold text-gray-900 dark:text-white mb-4\">Recent Schema Changes</h2>\n\t\t\t\t\t\t\t\t\t{#if overview.recent_changes && overview.recent_changes.length > 0}\n\t\t\t\t\t\t\t\t\t\t<div class=\"space-y-3\">\n\t\t\t\t\t\t\t\t\t\t\t{#each overview.recent_changes as change}\n\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-sm\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between mb-1\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"font-medium text-gray-900 dark:text-white\">{change.database_name}.{change.table_name}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">{formatTime(change.detected_at)}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-gray-600 dark:text-gray-400\">{change.change_type}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No recent changes</p>\n\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<!-- Recent Violations -->\n\t\t\t\t\t\t\t\t<div 
class=\"ds-panel p-6\">\n\t\t\t\t\t\t\t\t\t<h2 class=\"text-lg font-semibold text-gray-900 dark:text-white mb-4\">Recent Violations</h2>\n\t\t\t\t\t\t\t\t\t{#if overview.recent_violations && overview.recent_violations.length > 0}\n\t\t\t\t\t\t\t\t\t\t<div class=\"space-y-3\">\n\t\t\t\t\t\t\t\t\t\t\t{#each overview.recent_violations as violation}\n\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-sm\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between mb-1\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"font-medium text-gray-900 dark:text-white\">{violation.policy_name}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tviolation.severity === 'critical'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: violation.severity === 'warn'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 
'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{violation.severity}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-gray-600 dark:text-gray-400\">{truncate(violation.violation_detail, 60)}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500 mt-1\">{formatTime(violation.detected_at)}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No recent violations</p>\n\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Tables Tab -->\n\t\t\t\t\t{#if activeTab === 'tables'}\n\t\t\t\t\t\t<div class=\"space-y-6\">\n\t\t\t\t\t\t\t<!-- Filters -->\n\t\t\t\t\t\t\t<div class=\"flex flex-col md:flex-row gap-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex-1\">\n\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\t\t\tplaceholder=\"Search tables...\"\n\t\t\t\t\t\t\t\t\t\tbind:value={tableSearchQuery}\n\t\t\t\t\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"w-full md:w-64\">\n\t\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\t\toptions={databaseFilterOptions}\n\t\t\t\t\t\t\t\t\t\tvalue={selectedDatabase}\n\t\t\t\t\t\t\t\t\t\tonChange={(v) => selectedDatabase = v}\n\t\t\t\t\t\t\t\t\t\tplaceholder=\"All Databases\"\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Tables List -->\n\t\t\t\t\t\t\t{#if filteredTables.length > 0}\n\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t<table 
class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Database</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Table</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Engine</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Rows</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Size</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Tags</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Details</th>\n\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t{#each filteredTables as table}\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-800 dark:text-gray-200\">{table.database_name}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-800 dark:text-gray-200 font-medium\">{table.table_name}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{table.engine}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{table.total_rows.toLocaleString()}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{formatBytes(table.total_bytes)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if table.tags && table.tags.length > 0}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex flex-wrap gap-1\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#each table.tags as tag}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center px-1.5 py-0.5 rounded border border-orange-200 bg-orange-100 text-orange-900 dark:border-orange-700/60 dark:bg-orange-500/15 dark:text-orange-200 
text-[11px]\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{tag}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-gray-400\">-</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-right\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => openTableDetails(table)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline px-2 py-1\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<PanelRightOpen class=\"w-3 h-3\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tView\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t<div class=\"ds-empty py-12\">\n\t\t\t\t\t\t\t\t\t<Table2 class=\"w-12 h-12 mx-auto text-gray-400 mb-4\" />\n\t\t\t\t\t\t\t\t\t<p class=\"text-gray-500 dark:text-gray-400\">No tables found</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Query Audit Tab -->\n\t\t\t\t\t{#if activeTab === 'queries'}\n\t\t\t\t\t\t<div class=\"space-y-6\">\n\t\t\t\t\t\t\t<!-- Filters -->\n\t\t\t\t\t\t\t<div class=\"flex flex-col md:flex-row gap-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex-1\">\n\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\t\t\tplaceholder=\"Filter by user...\"\n\t\t\t\t\t\t\t\t\t\tbind:value={queryUserFilter}\n\t\t\t\t\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"w-full md:w-48\">\n\t\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\t\toptions={queryLimitOptions}\n\t\t\t\t\t\t\t\t\t\tvalue={String(queryLimit)}\n\t\t\t\t\t\t\t\t\t\tonChange={(v) => 
{\n\t\t\t\t\t\t\t\t\t\t\tqueryLimit = Number(v) || 100;\n\t\t\t\t\t\t\t\t\t\t\tvoid loadQueries();\n\t\t\t\t\t\t\t\t\t\t}}\n\t\t\t\t\t\t\t\t\t\tplaceholder=\"Query limit\"\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\tonclick={() => loadQueries()}\n\t\t\t\t\t\t\t\t\tclass=\"ds-btn-primary px-4 py-2\"\n\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\tApply Filters\n\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Top Queries -->\n\t\t\t\t\t\t\t{#if topQueries.length > 0}\n\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300 mb-2\">Top Queries by Execution Count</h3>\n\t\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Query</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Runs</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each topQueries as tq}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-xs text-gray-500 font-mono max-w-xl truncate\">{truncate(tq.sample_query, 140)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-right text-xs text-gray-500 whitespace-nowrap\">{tq.count} runs</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{/if}\n\n\t\t\t\t\t\t\t<!-- Query Log -->\n\t\t\t\t\t\t\t{#if queryLog.length > 0}\n\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t<th 
class=\"ds-table-th\">Time</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">User</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Type</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Query</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Duration</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Rows</th>\n\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Details</th>\n\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t{#each queryLog as entry}\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-xs text-gray-500 whitespace-nowrap\">{formatTime(entry.event_time)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-800 dark:text-gray-200\">{entry.ch_user}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-1.5 py-0.5 rounded text-[11px] ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tentry.is_error\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 
'bg-red-100 text-red-800 dark:bg-red-900/30 dark:text-red-300'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-green-100 text-green-800 dark:bg-green-900/30 dark:text-green-300'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{entry.query_kind}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-xs text-gray-500 max-w-xs truncate font-mono\">{truncate(entry.query_text, 80)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-xs text-gray-500 whitespace-nowrap\">{entry.duration_ms}ms</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-xs text-gray-500\">{entry.read_rows.toLocaleString()}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-right\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => openQueryDetails(entry)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline px-2 py-1\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<PanelRightOpen class=\"w-3 h-3\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tView\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t<div class=\"ds-empty py-12\">\n\t\t\t\t\t\t\t\t\t<Search class=\"w-12 h-12 mx-auto text-gray-400 mb-4\" />\n\t\t\t\t\t\t\t\t\t<p class=\"text-gray-500 dark:text-gray-400\">No query logs found</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Lineage Tab -->\n\t\t\t\t\t{#if activeTab === 'lineage'}\n\t\t\t\t\t\t<div class=\"flex flex-col flex-1 min-h-0 gap-4\">\n\t\t\t\t\t\t\t<!-- Toolbar -->\n\t\t\t\t\t\t\t<div class=\"flex items-center gap-3\">\n\t\t\t\t\t\t\t\t<div class=\"relative flex-1 max-w-sm\">\n\t\t\t\t\t\t\t\t\t<Search size={14} class=\"absolute left-2.5 
top-1/2 -translate-y-1/2 text-gray-400\" />\n\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\t\t\tplaceholder=\"Filter by table or database...\"\n\t\t\t\t\t\t\t\t\t\tclass=\"w-full pl-8 pr-3 py-1.5 text-sm bg-gray-100 dark:bg-gray-800 border border-gray-300 dark:border-gray-700 rounded-md text-gray-800 dark:text-gray-200 focus:outline-none focus:border-ch-blue\"\n\t\t\t\t\t\t\t\t\t\tbind:value={lineageSearch}\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">{lineageEdges.length} edge{lineageEdges.length !== 1 ? 's' : ''}</span>\n\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\tclass=\"px-2.5 py-1.5 text-xs rounded-md border border-gray-300 dark:border-gray-700 hover:bg-gray-200 dark:hover:bg-gray-800 inline-flex items-center gap-1.5\"\n\t\t\t\t\t\t\t\t\tonclick={() => loadLineage()}\n\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t<RefreshCw size={12} />\n\t\t\t\t\t\t\t\t\tRefresh\n\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Graph -->\n\t\t\t\t\t\t\t{#if lineageGraph && lineageGraph.nodes.length > 0}\n\t\t\t\t\t\t\t\t<div class=\"flex-1 min-h-0 border border-gray-200 dark:border-gray-800 rounded-lg overflow-hidden\">\n\t\t\t\t\t\t\t\t\t<LineageGraphView\n\t\t\t\t\t\t\t\t\t\tgraph={lineageGraph}\n\t\t\t\t\t\t\t\t\t\tsearchFilter={lineageSearch}\n\t\t\t\t\t\t\t\t\t\tonedgeclick={(edge) => {\n\t\t\t\t\t\t\t\t\t\t\tlineageSelectedEdge = edge;\n\t\t\t\t\t\t\t\t\t\t\tlineageQueryText = '';\n\t\t\t\t\t\t\t\t\t\t\tlineageSheetOpen = true;\n\t\t\t\t\t\t\t\t\t\t\tif (edge.query_id) {\n\t\t\t\t\t\t\t\t\t\t\t\tfetchQueryByQueryID(edge.query_id)\n\t\t\t\t\t\t\t\t\t\t\t\t\t.then((res) => { lineageQueryText = res?.entry?.query_text ?? 
'Query text not available'; })\n\t\t\t\t\t\t\t\t\t\t\t\t\t.catch(() => { lineageQueryText = 'Failed to load query text'; });\n\t\t\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\t\t}}\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<!-- Legend -->\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-4 text-xs text-gray-500\">\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-3 h-0.5 bg-orange-500 rounded\"></span> insert_select\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-3 h-0.5 bg-blue-500 rounded\" style=\"border-top: 2px dashed;\"></span> create_as_select\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 border-blue-400\"></span> source\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 border-green-400\"></span> target\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 border-orange-400\"></span> current\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t<div class=\"ds-empty py-12\">\n\t\t\t\t\t\t\t\t\t<GitBranch class=\"w-12 h-12 mx-auto text-gray-400 mb-4\" />\n\t\t\t\t\t\t\t\t\t<p class=\"text-gray-500 dark:text-gray-400\">No lineage data. 
Run a sync to detect data flows.</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t<!-- Edge detail sheet -->\n\t\t\t\t\t\t{#if lineageSheetOpen && lineageSelectedEdge}\n\t\t\t\t\t\t\t<Sheet title=\"Lineage Edge\" open={lineageSheetOpen} onclose={() => lineageSheetOpen = false} size=\"lg\">\n\t\t\t\t\t\t\t\t<div class=\"space-y-4\">\n\t\t\t\t\t\t\t\t\t<div class=\"grid grid-cols-2 gap-3 text-sm\">\n\t\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 mb-1\">Source</div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"font-medium text-gray-800 dark:text-gray-200\">{lineageSelectedEdge.source_database}.{lineageSelectedEdge.source_table}</div>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 mb-1\">Target</div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"font-medium text-gray-800 dark:text-gray-200\">{lineageSelectedEdge.target_database}.{lineageSelectedEdge.target_table}</div>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 mb-1\">Type</div>\n\t\t\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center px-1.5 py-0.5 rounded border border-orange-200 bg-orange-100 text-orange-900 dark:border-orange-700/60 dark:bg-orange-500/15 dark:text-orange-200 text-[11px]\">\n\t\t\t\t\t\t\t\t\t\t\t\t{lineageSelectedEdge.edge_type}\n\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 mb-1\">User</div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-gray-700 dark:text-gray-300\">{lineageSelectedEdge.ch_user}</div>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t\t{#if lineageSelectedEdge.column_edges && lineageSelectedEdge.column_edges.length > 0}\n\t\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 mb-2\">Column Mappings</div>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"grid 
grid-cols-[1fr_auto_1fr] gap-x-3 gap-y-1 text-xs\">\n\t\t\t\t\t\t\t\t\t\t\t\t{#each lineageSelectedEdge.column_edges as ce}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"font-mono text-gray-700 dark:text-gray-300\">{ce.source_column}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-gray-400\">&rarr;</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"font-mono text-gray-700 dark:text-gray-300\">{ce.target_column}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{/if}\n\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500 mb-2\">Query</div>\n\t\t\t\t\t\t\t\t\t\t{#if lineageQueryText}\n\t\t\t\t\t\t\t\t\t\t\t<pre class=\"text-xs bg-gray-100 dark:bg-gray-800 rounded-lg p-3 overflow-auto max-h-80 text-gray-800 dark:text-gray-200 whitespace-pre-wrap\">{lineageQueryText}</pre>\n\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"text-xs text-gray-500\">Loading query...</div>\n\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</Sheet>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- View Graph Tab -->\n\t\t\t\t\t{#if activeTab === 'viewgraph'}\n\t\t\t\t\t\t<div class=\"flex flex-col flex-1 min-h-0 gap-4\">\n\t\t\t\t\t\t\t<!-- Toolbar -->\n\t\t\t\t\t\t\t<div class=\"flex items-center gap-3\">\n\t\t\t\t\t\t\t\t<div class=\"relative flex-1 max-w-sm\">\n\t\t\t\t\t\t\t\t\t<Search size={14} class=\"absolute left-2.5 top-1/2 -translate-y-1/2 text-gray-400\" />\n\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\t\t\tplaceholder=\"Filter by table or view name...\"\n\t\t\t\t\t\t\t\t\t\tclass=\"w-full pl-8 pr-3 py-1.5 text-sm bg-gray-100 dark:bg-gray-800 border border-gray-300 dark:border-gray-700 rounded-md text-gray-800 dark:text-gray-200 focus:outline-none focus:border-ch-blue\"\n\t\t\t\t\t\t\t\t\t\tbind:value={viewGraphSearch}\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<span 
class=\"text-xs text-gray-500\">\n\t\t\t\t\t\t\t\t\t{viewGraphData?.nodes?.length ?? 0} node{(viewGraphData?.nodes?.length ?? 0) !== 1 ? 's' : ''},\n\t\t\t\t\t\t\t\t\t{viewGraphData?.edges?.length ?? 0} edge{(viewGraphData?.edges?.length ?? 0) !== 1 ? 's' : ''}\n\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\tclass=\"px-2.5 py-1.5 text-xs rounded-md border border-gray-300 dark:border-gray-700 hover:bg-gray-200 dark:hover:bg-gray-800 inline-flex items-center gap-1.5\"\n\t\t\t\t\t\t\t\t\tonclick={() => loadViewGraph()}\n\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t<RefreshCw size={12} />\n\t\t\t\t\t\t\t\t\tRefresh\n\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Graph -->\n\t\t\t\t\t\t\t{#if viewGraphData && viewGraphData.nodes.length > 0}\n\t\t\t\t\t\t\t\t<div class=\"flex-1 min-h-0 border border-gray-200 dark:border-gray-800 rounded-lg overflow-hidden\">\n\t\t\t\t\t\t\t\t\t<LineageGraphView\n\t\t\t\t\t\t\t\t\t\tgraph={viewGraphData}\n\t\t\t\t\t\t\t\t\t\tsearchFilter={viewGraphSearch}\n\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<!-- Legend -->\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-4 text-xs text-gray-500\">\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-3 h-0.5 bg-orange-500 rounded\"></span> view_dependency (source &rarr; view)\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-3 h-0.5 bg-blue-500 rounded\" style=\"border-top: 2px dashed;\"></span> materialized_to (MV &rarr; target)\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 border-blue-400\"></span> source table\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 
border-orange-400\"></span> materialized view\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 border-green-400\"></span> target table\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center gap-1.5\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"w-2 h-2 rounded-full border-2 border-purple-400\"></span> view\n\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t<div class=\"ds-empty py-12\">\n\t\t\t\t\t\t\t\t\t<GitBranch class=\"w-12 h-12 mx-auto text-gray-400 mb-4\" />\n\t\t\t\t\t\t\t\t\t<p class=\"text-gray-500 dark:text-gray-400\">No views or materialized views found. Create some views in your ClickHouse instance to see the dependency graph.</p>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Access Tab -->\n\t\t\t\t\t{#if activeTab === 'access'}\n\t\t\t\t\t\t<div class=\"space-y-6\">\n\t\t\t\t\t\t\t<!-- Over-Permissions Alerts -->\n\t\t\t\t\t\t\t{#if overPermissions.length > 0}\n\t\t\t\t\t\t\t\t<div class=\"ds-card p-6\">\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-lg font-semibold text-gray-900 dark:text-white mb-4 flex items-center\">\n\t\t\t\t\t\t\t\t\t\t<AlertTriangle class=\"w-5 h-5 mr-2 text-yellow-500\" />\n\t\t\t\t\t\t\t\t\t\tOver-Permissions Detected\n\t\t\t\t\t\t\t\t\t</h3>\n\t\t\t\t\t\t\t\t\t<div class=\"space-y-3\">\n\t\t\t\t\t\t\t\t\t\t{#each groupedOverPermissions as group}\n\t\t\t\t\t\t\t\t\t\t\t{@const groupExpanded = expandedOverPermissionUsers[group.userName] ?? 
group.topSeverity === 'critical'}\n\t\t\t\t\t\t\t\t\t\t\t<div class={`rounded-lg border ${overPermissionPanelTone(group.topSeverity)}`}>\n\t\t\t\t\t\t\t\t\t\t\t\t<div\n\t\t\t\t\t\t\t\t\t\t\t\t\trole=\"button\"\n\t\t\t\t\t\t\t\t\t\t\t\t\ttabindex=\"0\"\n\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => toggleOverPermissionGroup(group.userName)}\n\t\t\t\t\t\t\t\t\t\t\t\t\tonkeydown={(e) => {\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tif (e.key === 'Enter' || e.key === ' ') {\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\te.preventDefault();\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttoggleOverPermissionGroup(group.userName);\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\t\t\t\t\t}}\n\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"w-full p-4 text-left flex items-center justify-between gap-3\"\n\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"min-w-0 flex items-start gap-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<ChevronRight class={`mt-0.5 w-4 h-4 shrink-0 text-gray-600 dark:text-gray-300 transition-transform ${groupExpanded ? 'rotate-90' : ''}`} />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"min-w-0\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"font-medium text-gray-900 dark:text-white truncate\">{group.userName}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-600 dark:text-gray-400\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{group.total} risky grants across {group.databases} {group.databases === 1 ? 
'database' : 'databases'}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-1.5 shrink-0\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttype=\"button\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={(e) => {\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\te.stopPropagation();\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\topenAccessGroupDetails(group);\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline px-2.5 py-1\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<PanelRightOpen class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tUser Details\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if group.critical > 0}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${overPermissionBadgeTone('critical')}`}>critical {group.critical}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if group.warn > 0}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${overPermissionBadgeTone('warn')}`}>warn {group.warn}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if group.info > 0}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${overPermissionBadgeTone('info')}`}>info {group.info}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t\t\t\t\t{#if groupExpanded}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"px-4 pb-4 space-y-2 border-t border-gray-200/70 dark:border-gray-800/70\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#each group.alerts as alert}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{@const severity = overPermissionSeverity(alert)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"bg-white/70 dark:bg-black/20 rounded-lg 
border border-gray-200/70 dark:border-gray-800/70 p-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-start justify-between gap-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"min-w-0\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-600 dark:text-gray-400\">{alert.reason}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500 dark:text-gray-500 mt-1\">Database: {alert.database_name || '*'}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500 dark:text-gray-500\">Privilege: {alert.privilege}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2 shrink-0\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class={`inline-flex items-center px-2 py-1 rounded-full text-xs font-medium ${overPermissionBadgeTone(severity)}`}>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{severity}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => openAccessDetails(alert)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline px-2.5 py-1\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<PanelRightOpen class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tDetails\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{/if}\n\n\t\t\t\t\t\t\t<!-- Users -->\n\t\t\t\t\t\t\t<div class=\"ds-card p-6\">\n\t\t\t\t\t\t\t\t<h3 class=\"text-lg font-semibold text-gray-900 dark:text-white mb-4\">Users</h3>\n\t\t\t\t\t\t\t\t{#if users.length > 0}\n\t\t\t\t\t\t\t\t\t<div 
class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Name</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Auth Type</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Host</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Default Roles</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each users as user}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-800 dark:text-gray-200 font-medium\">{user.name}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{user.auth_type || '-'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{user.host_ip || '-'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{formatDefaultRoles(user.default_roles)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No users found</p>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Roles -->\n\t\t\t\t\t\t\t<div class=\"ds-card p-6\">\n\t\t\t\t\t\t\t\t<h3 class=\"text-lg font-semibold text-gray-900 dark:text-white mb-4\">Roles</h3>\n\t\t\t\t\t\t\t\t{#if roles.length > 0}\n\t\t\t\t\t\t\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4\">\n\t\t\t\t\t\t\t\t\t\t{#each roles as role}\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"bg-gray-50 dark:bg-gray-900 rounded-lg p-4 border border-gray-200 dark:border-gray-800\">\n\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"font-medium text-gray-900 
dark:text-white\">{role.name}</p>\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No roles found</p>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Access Matrix -->\n\t\t\t\t\t\t\t<div class=\"ds-card p-6\">\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2 mb-4\">\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-lg font-semibold text-gray-900 dark:text-white\">Access Matrix</h3>\n\t\t\t\t\t\t\t\t\t<HelpTip text=\"Live materialized user/role grants. Use filters to narrow scope; table stays contained with vertical scroll to avoid page overflow.\" />\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<!-- Filters -->\n\t\t\t\t\t\t\t\t<div class=\"flex flex-col md:flex-row gap-4 mb-4\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex-1\">\n\t\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\t\t\t\tplaceholder=\"Filter by user...\"\n\t\t\t\t\t\t\t\t\t\t\tbind:value={accessUserFilter}\n\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<div class=\"flex-1\">\n\t\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\t\t\t\tplaceholder=\"Filter by database...\"\n\t\t\t\t\t\t\t\t\t\t\tbind:value={accessDatabaseFilter}\n\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t{#if filteredAccessMatrix.length > 0}\n\t\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto max-h-[60vh] overflow-y-auto rounded-lg border border-gray-200 dark:border-gray-800\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table table-fixed min-w-[900px]\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row sticky top-0 z-10 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">User</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th 
class=\"ds-table-th\">Database</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Table</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Privilege</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Grant Option</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each filteredAccessMatrix as entry}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-800 dark:text-gray-200 font-medium truncate\">{entry.user_name}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400 truncate\">{entry.database_name || '*'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400 truncate\">{entry.table_name || '*'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"inline-flex max-w-full items-center px-1.5 py-0.5 rounded border border-orange-200 bg-orange-100 text-orange-900 dark:border-orange-700/60 dark:bg-orange-500/15 dark:text-orange-200 text-[11px] truncate\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{entry.privilege}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-3 text-gray-500 dark:text-gray-400\">{entry.is_direct_grant ? 
'Direct' : 'Inherited'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No access grants found</p>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Incidents Tab -->\n\t\t\t\t\t{#if activeTab === 'incidents'}\n\t\t\t\t\t\t<div class=\"space-y-4\">\n\t\t\t\t\t\t\t<div class=\"ds-card p-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex flex-col gap-3 md:flex-row md:items-center md:justify-between\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex flex-col gap-3 md:flex-row md:items-center md:gap-3\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"w-full md:w-48\">\n\t\t\t\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\t\t\t\toptions={incidentStatusOptions}\n\t\t\t\t\t\t\t\t\t\t\t\tvalue={incidentStatusFilter}\n\t\t\t\t\t\t\t\t\t\t\t\tonChange={(v) => incidentStatusFilter = v}\n\t\t\t\t\t\t\t\t\t\t\t\tplaceholder=\"All Statuses\"\n\t\t\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t<div class=\"w-full md:w-48\">\n\t\t\t\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\t\t\t\toptions={incidentSeverityOptions}\n\t\t\t\t\t\t\t\t\t\t\t\tvalue={incidentSeverityFilter}\n\t\t\t\t\t\t\t\t\t\t\t\tonChange={(v) => incidentSeverityFilter = v}\n\t\t\t\t\t\t\t\t\t\t\t\tplaceholder=\"All Severities\"\n\t\t\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-primary px-3 py-2\" onclick={() => loadIncidents()}>Apply Filters</button>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline px-3 py-2\" onclick={() => incidentCreateSheetOpen = true}>\n\t\t\t\t\t\t\t\t\t\t<Plus class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\tNew Incident\n\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<div class=\"ds-card p-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex 
items-center justify-between mb-3\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2\">\n\t\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Incident Queue</h3>\n\t\t\t\t\t\t\t\t\t\t<HelpTip text=\"Operational queue for governance issues. Open incidents in the sheet to triage, assign, resolve, and collaborate via comments.\" />\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500 dark:text-gray-400\">{incidents.length} incidents</span>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{#if incidents.length > 0}\n\t\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table text-xs\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Title</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Severity</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Status</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Occurrences</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Assignee</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Last Seen</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Details</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each incidents as incident}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-900 dark:text-white font-medium\">{incident.title}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class={`inline-flex items-center px-2 py-0.5 rounded-full text-[11px] font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tincident.severity === 'critical'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 
'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: incident.severity === 'error'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-orange-100 text-orange-800 dark:bg-orange-900 dark:text-orange-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: incident.severity === 'warn'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}>{incident.severity}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-700 dark:text-gray-300\">{incident.status}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-700 dark:text-gray-300\">{incident.occurrence_count}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-700 dark:text-gray-300\">{incident.assignee || '-'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-500 dark:text-gray-400\">{formatTime(incident.last_seen_at)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-right\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline px-2.5 py-1\" onclick={() => openIncidentDetails(incident)}>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<PanelRightOpen class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tOpen\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<div class=\"text-center py-8\">\n\t\t\t\t\t\t\t\t\t\t<Siren class=\"w-8 h-8 mx-auto text-gray-400 mb-2\" />\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No incidents found for current 
filters</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Policies Tab -->\n\t\t\t\t\t{#if activeTab === 'policies'}\n\t\t\t\t\t\t<div class=\"space-y-3\">\n\t\t\t\t\t\t\t<!-- Policy List -->\n\t\t\t\t\t\t\t\t<div class=\"ds-card\">\n\t\t\t\t\t\t\t\t<div class=\"px-4 py-3 border-b border-gray-200 dark:border-gray-700\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Policies</h3>\n\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\tonclick={() => openPolicyForm()}\n\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-primary px-3 py-1.5\"\n\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t<Plus class=\"w-3.5 h-3.5 mr-1.5\" />\n\t\t\t\t\t\t\t\t\t\t\tCreate Policy\n\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<div class=\"px-4 py-3\">\n\t\t\t\t\t\t\t\t\t{#if policies.length > 0}\n\t\t\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table text-xs\">\n\t\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Policy</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Scope</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Role</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Severity</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Mode</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Status</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Updated</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Actions</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t\t{#each policies as policy}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td 
class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"font-semibold text-gray-900 dark:text-white\">{policy.name}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if policy.description}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-500 dark:text-gray-400 mt-0.5\">{truncate(policy.description, 70)}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-600 dark:text-gray-300\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{policy.object_type}{policy.object_database ? ` / ${policy.object_database}` : ''}{policy.object_table ? `.${policy.object_table}` : ''}{policy.object_column ? `.${policy.object_column}` : ''}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-600 dark:text-gray-300\">{policy.required_role || '-'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-2 py-0.5 rounded-full text-[11px] font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpolicy.severity === 'critical'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: policy.severity === 'warn'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 
'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{policy.severity}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-2 py-0.5 rounded-full text-[11px] font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpolicy.enforcement_mode === 'block'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-gray-100 text-gray-700 dark:bg-gray-700 dark:text-gray-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{policy.enforcement_mode}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-2 py-0.5 rounded-full text-[11px] font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpolicy.enabled\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-gray-100 text-gray-700 dark:bg-gray-700 dark:text-gray-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{policy.enabled ? 
'enabled' : 'disabled'}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-500 dark:text-gray-400\">{formatTime(policy.updated_at)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex justify-end gap-1\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => openPolicyForm(policy)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-icon-btn\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttitle=\"Edit policy\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<Edit class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => requestDeletePolicy(policy)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-icon-btn hover:text-red-500 dark:hover:text-red-400\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttitle=\"Delete policy\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<Trash2 class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t<div class=\"text-center py-6\">\n\t\t\t\t\t\t\t\t\t\t\t<Shield class=\"w-8 h-8 mx-auto text-gray-400 mb-2\" />\n\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No policies configured</p>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Violations -->\n\t\t\t\t\t\t\t\t<div class=\"ds-card\">\n\t\t\t\t\t\t\t\t<div class=\"px-4 py-3 border-b border-gray-200 
dark:border-gray-700\">\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Policy Violations</h3>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{#if violations.length > 0}\n\t\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table text-xs\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Policy</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Severity</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">User</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Detail</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Detected</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Actions</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each violations as violation}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-900 dark:text-white font-medium\">{violation.policy_name}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass={`inline-flex items-center px-2 py-0.5 rounded-full text-[11px] font-medium ${\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tviolation.severity === 'critical'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: violation.severity === 'warn'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t? 
'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t: 'bg-blue-100 text-blue-800 dark:bg-blue-900 dark:text-blue-200'\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t}`}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{violation.severity}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-600 dark:text-gray-300\">{violation.ch_user}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-600 dark:text-gray-300\">{truncate(violation.violation_detail, 120)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-gray-500 dark:text-gray-400\">{formatTime(violation.detected_at)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-3 py-2.5 text-right\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline px-2 py-1\" onclick={() => createIncidentFromViolation(violation)}>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<MessageSquare class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tCreate Incident\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<div class=\"text-center py-6\">\n\t\t\t\t\t\t\t\t\t\t<AlertTriangle class=\"w-8 h-8 mx-auto text-gray-400 mb-2\" />\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No violations detected</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Query Log Tab -->\n\t\t\t\t\t{#if activeTab === 'querylog'}\n\t\t\t\t\t\t<div class=\"flex flex-wrap items-center gap-2 mb-3\">\n\t\t\t\t\t\t\t<div 
class=\"w-36\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\toptions={qlTimeRangeOptions}\n\t\t\t\t\t\t\t\t\tvalue={qlTimeRange}\n\t\t\t\t\t\t\t\t\tonChange={(v) => qlTimeRange = v}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\tplaceholder=\"Search query or user...\"\n\t\t\t\t\t\t\t\tclass=\"ds-input-sm w-48\"\n\t\t\t\t\t\t\t\tbind:value={qlSearch}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t<div class=\"w-36\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\tvalue={qlQueryKind}\n\t\t\t\t\t\t\t\t\tplaceholder=\"All kinds\"\n\t\t\t\t\t\t\t\t\toptions={[\n\t\t\t\t\t\t\t\t\t\t{ value: '', label: 'All kinds', keywords: 'all' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'Select', label: 'Select' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'Insert', label: 'Insert' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'Create', label: 'Create' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'Alter', label: 'Alter' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'Drop', label: 'Drop' },\n\t\t\t\t\t\t\t\t\t]}\n\t\t\t\t\t\t\t\t\tonChange={(v) => qlQueryKind = v}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<div class=\"w-32\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\tvalue={qlStatus}\n\t\t\t\t\t\t\t\t\tplaceholder=\"All status\"\n\t\t\t\t\t\t\t\t\toptions={[\n\t\t\t\t\t\t\t\t\t\t{ value: '', label: 'All status', keywords: 'all' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'success', label: 'Success' },\n\t\t\t\t\t\t\t\t\t\t{ value: 'error', label: 'Error' },\n\t\t\t\t\t\t\t\t\t]}\n\t\t\t\t\t\t\t\t\tonChange={(v) => qlStatus = v}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\tclass=\"ds-btn-primary\"\n\t\t\t\t\t\t\t\tonclick={() => { qlOffset = 0; loadQueryLog() }}\n\t\t\t\t\t\t\t>Search</button>\n\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\tclass=\"ds-btn-ghost\"\n\t\t\t\t\t\t\t\tonclick={() => loadQueryLog()}\n\t\t\t\t\t\t\t\ttitle=\"Refresh\"\n\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t<RefreshCw size={14} 
/>\n\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t{#if queryLogLoading}\n\t\t\t\t\t\t\t<div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n\t\t\t\t\t\t{:else if queryLogData.length === 0}\n\t\t\t\t\t\t\t<p class=\"text-center text-sm text-gray-500 py-8\">No query log entries found</p>\n\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t<div class=\"ds-table-wrap\">\n\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"w-6\"></th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Time</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">User</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Query</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Duration</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Rows</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Status</th>\n\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t{#each queryLogData as row, i}\n\t\t\t\t\t\t\t\t\t\t\t<tr\n\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-table-row cursor-pointer\"\n\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => expandedRow = expandedRow === i ? null : i}\n\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"py-2 px-1 text-gray-400\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t{#if expandedRow === i}<ChevronDown size={12} />{:else}<ChevronRight size={12} />{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono whitespace-nowrap\">{formatTime(row.event_time)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{row.user}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono max-w-xs truncate\">{truncate(row.query ?? '', 60)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-right whitespace-nowrap\">{row.query_duration_ms}ms</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-right\">{row.read_rows ?? 
0}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t{#if row.exception_code === 0}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-success\">OK</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-danger\">Error</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t{#if expandedRow === i}\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"bg-gray-50 dark:bg-gray-900/50 border-b border-gray-100 dark:border-gray-900\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<td colspan=\"7\" class=\"p-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<pre class=\"text-xs text-gray-700 dark:text-gray-300 font-mono whitespace-pre-wrap break-all rounded p-3 border border-gray-200 dark:border-gray-800 bg-gray-100 dark:bg-gray-900\">{row.query}</pre>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if row.exception}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-red-500 mt-2\"><strong>Error:</strong> {row.exception}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<div class=\"flex items-center justify-between mt-3\">\n\t\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Showing {qlOffset + 1}–{qlOffset + queryLogData.length}</span>\n\t\t\t\t\t\t\t\t<div class=\"flex gap-2\">\n\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline disabled:opacity-50\"\n\t\t\t\t\t\t\t\t\t\tdisabled={qlOffset === 0}\n\t\t\t\t\t\t\t\t\t\tonclick={() => { qlOffset = Math.max(0, qlOffset - qlLimit); loadQueryLog() }}\n\t\t\t\t\t\t\t\t\t>Prev</button>\n\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline disabled:opacity-50\"\n\t\t\t\t\t\t\t\t\t\tdisabled={queryLogData.length < qlLimit}\n\t\t\t\t\t\t\t\t\t\tonclick={() => { qlOffset += qlLimit; loadQueryLog() 
}}\n\t\t\t\t\t\t\t\t\t>Next</button>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Alerts Tab -->\n\t\t\t\t\t{#if activeTab === 'alerts'}\n\t\t\t\t\t\t{#if alertsLoading}\n\t\t\t\t\t\t\t<div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t<div class=\"flex flex-col md:flex-row md:items-center md:justify-between gap-3 mb-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2\">\n\t\t\t\t\t\t\t\t\t<Bell size={16} class=\"text-ch-blue\" />\n\t\t\t\t\t\t\t\t\t<h2 class=\"text-sm font-semibold text-gray-700 dark:text-gray-300\">Alerting Control Center</h2>\n\t\t\t\t\t\t\t\t\t<HelpTip text=\"Define delivery channels and route policies. Complex create flows live in sheets to keep this page clean and easy to scan.\" />\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t<div class=\"flex flex-wrap items-center gap-2\">\n\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline\" onclick={() => channelSheetOpen = true}>New Channel</button>\n\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline\" onclick={() => ruleSheetOpen = true}>New Rule</button>\n\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline\" onclick={() => loadAlertsAdmin()} title=\"Refresh\">\n\t\t\t\t\t\t\t\t\t\t<RefreshCw size={14} />\n\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<div class=\"ds-card p-3 mb-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex flex-col md:flex-row gap-2 md:items-center md:justify-between\">\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2\">\n\t\t\t\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Test recipients</span>\n\t\t\t\t\t\t\t\t\t\t<HelpTip text=\"Used by the Test action on each channel. 
Enter comma-separated emails once, then test quickly.\" />\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<div class=\"w-full md:w-[520px]\">\n\t\t\t\t\t\t\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"email1@company.com, email2@company.com\" bind:value={alertTestRecipients} />\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<div class=\"ds-card p-3 mb-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2 mb-2\">\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Channels</h3>\n\t\t\t\t\t\t\t\t\t<HelpTip text=\"Channels are provider credentials (SMTP, Resend, Brevo). Routes reference these channels for delivery.\" />\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{#if alertChannels.length === 0}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 py-4\">No alert channels configured.</p>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<div class=\"ds-table-wrap max-h-[30vh] overflow-auto rounded border border-gray-200 dark:border-gray-800\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row sticky top-0 bg-gray-50 dark:bg-gray-900 z-10\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Name</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Type</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Active</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Secret</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-right\">Actions</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each alertChannels as channel}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-strong\">{channel.name}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{channel.channel_type}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td 
class=\"ds-td\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttype=\"checkbox\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-checkbox\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tchecked={channel.is_active}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonchange={(e) => toggleAlertChannel(channel, (e.target as HTMLInputElement).checked)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td\">{channel.has_secret ? 'Configured' : 'Missing'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-right\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex justify-end gap-2\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline\" onclick={() => testAlertChannelRecord(channel)}>Test</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button class=\"text-xs text-red-500 hover:text-red-700\" onclick={() => deleteAlertChannelRecord(channel)}>Delete</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<div class=\"ds-card p-3 mb-4\">\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2 mb-2\">\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Rules</h3>\n\t\t\t\t\t\t\t\t\t<HelpTip text=\"Each rule watches event types/severity and contains one or more routes. 
Expand a rule to inspect recipients, digest windows, and escalation.\" />\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{#if alertRules.length === 0}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 py-4\">No alert rules configured.</p>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<div class=\"space-y-2\">\n\t\t\t\t\t\t\t\t\t\t{#each alertRules as rule}\n\t\t\t\t\t\t\t\t\t\t\t<details class=\"ds-card overflow-hidden\">\n\t\t\t\t\t\t\t\t\t\t\t\t<summary class=\"cursor-pointer list-none px-3 py-2.5 flex items-center justify-between bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"min-w-0\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm font-semibold text-gray-900 dark:text-gray-100 truncate\">{rule.name}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-[11px] text-gray-500\">{rule.event_type} · min {rule.severity_min} · {rule.routes.length} routes</p>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<label class=\"ds-checkbox-label text-xs\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttype=\"checkbox\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-checkbox\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tchecked={rule.enabled}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tonchange={(e) => toggleAlertRule(rule, (e.target as HTMLInputElement).checked)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tEnabled\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</label>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<button class=\"text-xs text-red-500 hover:text-red-700\" onclick={(e) => { e.preventDefault(); deleteAlertRuleRecord(rule) }}>Delete</button>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t</summary>\n\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"border-t border-gray-200 dark:border-gray-800 p-3\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"grid grid-cols-2 md:grid-cols-4 gap-2 mb-3 text-xs\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"ds-panel-muted p-2\"><span 
class=\"text-gray-500\">Cooldown</span><div class=\"font-medium text-gray-800 dark:text-gray-200\">{rule.cooldown_seconds}s</div></div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"ds-panel-muted p-2\"><span class=\"text-gray-500\">Max Attempts</span><div class=\"font-medium text-gray-800 dark:text-gray-200\">{rule.max_attempts}</div></div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"ds-panel-muted p-2\"><span class=\"text-gray-500\">Subject Template</span><div class=\"font-medium text-gray-800 dark:text-gray-200 truncate\">{rule.subject_template || 'Default'}</div></div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"ds-panel-muted p-2\"><span class=\"text-gray-500\">Body Template</span><div class=\"font-medium text-gray-800 dark:text-gray-200 truncate\">{rule.body_template || 'Default'}</div></div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"overflow-x-auto rounded border border-gray-200 dark:border-gray-800\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table text-xs min-w-[980px]\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Channel</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Recipients</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Delivery</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Escalation</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Active</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#each rule.routes as route}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{route.channel_name} ({route.channel_type})</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono truncate max-w-xs\">{route.recipients.join(', 
')}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{route.delivery_mode}{route.delivery_mode === 'digest' ? ` (${route.digest_window_minutes}m)` : ''}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if route.escalation_channel_name}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{route.escalation_channel_name}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#if route.escalation_after_failures > 0}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-gray-500\"> after {route.escalation_after_failures} fail</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t—\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td\">{route.is_active ? 'yes' : 'no'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t</details>\n\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<div class=\"ds-card p-3\">\n\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2 mb-2\">\n\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Recent Alert Events</h3>\n\t\t\t\t\t\t\t\t\t<div class=\"w-28\">\n\t\t\t\t\t\t\t\t\t\t<input class=\"ds-input-sm\" type=\"number\" bind:value={alertEventLimit} />\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<button class=\"ds-btn-outline\" onclick={() => loadAlertsAdmin()}>Refresh</button>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{#if alertEvents.length === 0}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 py-4\">No alert events yet.</p>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<div class=\"ds-table-wrap 
max-h-[30vh] overflow-auto rounded border border-gray-200 dark:border-gray-800\">\n\t\t\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row sticky top-0 bg-gray-50 dark:bg-gray-900 z-10\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Time</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Type</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Severity</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Title</th>\n\t\t\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Status</th>\n\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t\t\t{#each alertEvents as evt}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{formatTime(evt.created_at)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{evt.event_type}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{evt.severity}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td\">{evt.title}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{evt.status}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Audit Log Tab -->\n\t\t\t\t\t{#if activeTab === 'auditlog'}\n\t\t\t\t\t\t<div class=\"flex flex-wrap items-center gap-2 mb-3\">\n\t\t\t\t\t\t\t<div class=\"w-32\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\toptions={auditLimitOptions}\n\t\t\t\t\t\t\t\t\tvalue={String(auditLimit)}\n\t\t\t\t\t\t\t\t\tonChange={(v) => { auditLimit = Number(v) || 100; void loadAuditLogs() }}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<div 
class=\"w-36\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\toptions={auditTimeRangeOptions}\n\t\t\t\t\t\t\t\t\tvalue={auditTimeRange}\n\t\t\t\t\t\t\t\t\tplaceholder=\"All time\"\n\t\t\t\t\t\t\t\t\tonChange={(v) => auditTimeRange = v}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\t\t\tplaceholder=\"Search action, user, details, IP...\"\n\t\t\t\t\t\t\t\tclass=\"ds-input-sm w-64\"\n\t\t\t\t\t\t\t\tbind:value={auditSearch}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t<div class=\"w-48\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\toptions={auditActionOptions}\n\t\t\t\t\t\t\t\t\tvalue={auditAction}\n\t\t\t\t\t\t\t\t\tplaceholder=\"All actions\"\n\t\t\t\t\t\t\t\t\tonChange={(v) => auditAction = v}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<div class=\"w-40\">\n\t\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\t\toptions={auditUsernameOptions}\n\t\t\t\t\t\t\t\t\tvalue={auditUsername}\n\t\t\t\t\t\t\t\t\tplaceholder=\"All users\"\n\t\t\t\t\t\t\t\t\tonChange={(v) => auditUsername = v}\n\t\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\tclass=\"ds-btn-primary\"\n\t\t\t\t\t\t\t\tonclick={() => loadAuditLogs()}\n\t\t\t\t\t\t\t>Search</button>\n\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\tclass=\"ds-btn-ghost\"\n\t\t\t\t\t\t\t\tonclick={() => loadAuditLogs()}\n\t\t\t\t\t\t\t\ttitle=\"Refresh\"\n\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t<RefreshCw size={14} />\n\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t{#if auditLoading}\n\t\t\t\t\t\t\t<div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n\t\t\t\t\t\t{:else if auditLogs.length === 0}\n\t\t\t\t\t\t\t<p class=\"text-center text-sm text-gray-500 py-8\">No audit logs found</p>\n\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t<div class=\"ds-table-wrap\">\n\t\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t\t\t<th 
class=\"ds-table-th\">Timestamp</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Action</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">User</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">Details</th>\n\t\t\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th\">IP</th>\n\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t\t{#each auditLogs as log}\n\t\t\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono whitespace-nowrap\">{formatTime(log.created_at)}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-neutral font-mono\">{log.action}</span>\n\t\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{log.username ?? '—'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono max-w-xs truncate\">{log.details ?? '—'}</td>\n\t\t\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-mono\">{log.ip_address ?? '—'}</td>\n\t\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t{/if}\n\n\t\t\t\t\t<!-- Settings Tab -->\n\t\t\t\t\t{#if activeTab === 'settings'}\n\t\t\t\t\t\t<div class=\"space-y-4\">\n\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t<h2 class=\"text-lg font-semibold text-gray-900 dark:text-gray-100\">Governance Sync Settings</h2>\n\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 mt-1\">\n\t\t\t\t\t\t\t\t\tControl the background syncer that collects metadata, query history, and access data from your ClickHouse cluster.\n\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Status card -->\n\t\t\t\t\t\t\t<div class=\"ds-card p-4\">\n\t\t\t\t\t\t\t\t{#if govSettingsLoading && !govSettings}\n\t\t\t\t\t\t\t\t\t<div class=\"flex items-center justify-center py-6\"><Spinner /></div>\n\t\t\t\t\t\t\t\t{:else if govSettings}\n\t\t\t\t\t\t\t\t\t<div class=\"flex flex-wrap items-start justify-between 
gap-4\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"space-y-2\">\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2\">\n\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-sm font-medium text-gray-700 dark:text-gray-300\">Status:</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{#if govSettings.sync_enabled}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-success\">Enabled</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-neutral\">Disabled</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t{#if govSettings.syncer_running}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-info\">Running</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{:else if govSettings.sync_enabled}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"ds-badge ds-badge-warning\">Idle</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t{#if govSettings.updated_at}\n\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500\">\n\t\t\t\t\t\t\t\t\t\t\t\t\tLast changed {formatTime(govSettings.updated_at)}\n\t\t\t\t\t\t\t\t\t\t\t\t\t{#if govSettings.updated_by}by <span class=\"font-mono\">{govSettings.updated_by}</span>{/if}\n\t\t\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500\">Never configured (default: disabled)</p>\n\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-center gap-2\">\n\t\t\t\t\t\t\t\t\t\t\t{#if govSettings.sync_enabled}\n\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-secondary\"\n\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => showDisableConfirm = true}\n\t\t\t\t\t\t\t\t\t\t\t\t\tdisabled={govToggleSaving}\n\t\t\t\t\t\t\t\t\t\t\t\t>Disable sync</button>\n\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-primary\"\n\t\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => showEnableConfirm = 
true}\n\t\t\t\t\t\t\t\t\t\t\t\t\tdisabled={govToggleSaving}\n\t\t\t\t\t\t\t\t\t\t\t\t>Enable sync</button>\n\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500\">Settings unavailable.</p>\n\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<!-- Disclosure panel -->\n\t\t\t\t\t\t\t<div class=\"ds-card p-4 space-y-4 border-blue-200 dark:border-blue-900/50 bg-blue-50/40 dark:bg-blue-950/20\">\n\t\t\t\t\t\t\t\t<div class=\"flex items-start gap-2\">\n\t\t\t\t\t\t\t\t\t<Info class=\"w-5 h-5 flex-shrink-0 mt-0.5 text-blue-600 dark:text-blue-400\" />\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100\">What governance sync does</h3>\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-600 dark:text-gray-400 mt-0.5\">\n\t\t\t\t\t\t\t\t\t\t\tRead this before enabling. Sync runs every 5 minutes against your ClickHouse cluster.\n\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<div class=\"grid gap-4 md:grid-cols-2\">\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t<h4 class=\"text-xs font-semibold uppercase tracking-wide text-gray-500 mb-1.5\">What it collects</h4>\n\t\t\t\t\t\t\t\t\t\t<ul class=\"text-sm text-gray-700 dark:text-gray-300 space-y-1 list-disc pl-5\">\n\t\t\t\t\t\t\t\t\t\t\t<li>Table and column metadata from <code class=\"font-mono text-xs\">system.tables</code> / <code class=\"font-mono text-xs\">system.columns</code></li>\n\t\t\t\t\t\t\t\t\t\t\t<li>Recent queries from <code class=\"font-mono text-xs\">system.query_log</code> (filtered: ≥10ms, no self-polls)</li>\n\t\t\t\t\t\t\t\t\t\t\t<li>Users, roles, and grants from <code class=\"font-mono text-xs\">system.users</code> / <code class=\"font-mono text-xs\">system.grants</code></li>\n\t\t\t\t\t\t\t\t\t\t</ul>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t<h4 
class=\"text-xs font-semibold uppercase tracking-wide text-gray-500 mb-1.5\">Where it's stored</h4>\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-700 dark:text-gray-300\">\n\t\t\t\t\t\t\t\t\t\t\tLocal SQLite at <code class=\"font-mono text-xs\">./data/ch-ui.db</code>. Never sent externally.\n\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t<h4 class=\"text-xs font-semibold uppercase tracking-wide text-gray-500 mb-1.5\">How it authenticates</h4>\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-700 dark:text-gray-300\">\n\t\t\t\t\t\t\t\t\t\t\tBorrows ClickHouse credentials from an active admin session. Each borrow is recorded in the audit log\n\t\t\t\t\t\t\t\t\t\t\t(<code class=\"font-mono text-xs\">governance.credential_borrow</code>, rate-limited to once per connection per hour).\n\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t\t\t<h4 class=\"text-xs font-semibold uppercase tracking-wide text-gray-500 mb-1.5\">Retention</h4>\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-700 dark:text-gray-300\">\n\t\t\t\t\t\t\t\t\t\t\t30-day rolling window. Older query log and violation rows are pruned automatically at startup and every 5 minutes.\n\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500\">\n\t\t\t\t\t\t\t\tToggle changes are written to the audit log (<code class=\"font-mono text-xs\">governance.sync_toggle</code>).\n\t\t\t\t\t\t\t\tDisabling stops the syncer immediately; collected data is preserved.\n\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{/if}\n\n\t\t\t</div>\n\t\t</div>\n\t</div>\n\n\t<Sheet\n\t\topen={tableDetailSheetOpen}\n\t\ttitle={selectedTable ? 
`Table Details · ${selectedTable.database_name}.${selectedTable.table_name}` : 'Table Details'}\n\t\tsize=\"xl\"\n\t\tonclose={closeTableDetails}\n\t>\n\t\t{#if selectedTable}\n\t\t\t<div class=\"space-y-5\">\n\t\t\t\t<div class=\"grid grid-cols-2 lg:grid-cols-4 gap-3\">\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Engine</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{selectedTable.engine || '-'}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Rows</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{selectedTable.total_rows.toLocaleString()}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Size</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{formatBytes(selectedTable.total_bytes)}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Partitions</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{selectedTable.partition_count}</p>\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\n\t\t\t\t<div class=\"ds-card overflow-hidden\">\n\t\t\t\t\t<div class=\"flex items-center justify-between px-4 py-3 border-b border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t<h4 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Columns</h4>\n\t\t\t\t\t\t<span class=\"text-xs text-gray-500 dark:text-gray-400\">{selectedTableColumns.length} total</span>\n\t\t\t\t\t</div>\n\t\t\t\t\t{#if tableDetailLoading}\n\t\t\t\t\t\t<div class=\"flex justify-center py-10\">\n\t\t\t\t\t\t\t<Spinner 
size=\"md\" />\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{:else if selectedTableColumns.length > 0}\n\t\t\t\t\t\t<div class=\"overflow-x-auto\">\n\t\t\t\t\t\t\t<table class=\"ds-table\">\n\t\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t\t<tr class=\"border-b border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t\t\t\t\t<th class=\"px-4 py-2 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase\">Column</th>\n\t\t\t\t\t\t\t\t\t\t<th class=\"px-4 py-2 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase\">Type</th>\n\t\t\t\t\t\t\t\t\t\t<th class=\"px-4 py-2 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase\">Default</th>\n\t\t\t\t\t\t\t\t\t\t<th class=\"px-4 py-2 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase\">Tags</th>\n\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t\t{#each selectedTableColumns as col}\n\t\t\t\t\t\t\t\t\t\t<tr class=\"border-b border-gray-100 dark:border-gray-900\">\n\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-4 py-2 font-medium text-gray-900 dark:text-white\">{col.column_name}</td>\n\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-4 py-2 text-gray-600 dark:text-gray-300\">{col.column_type}</td>\n\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-4 py-2 text-gray-500 dark:text-gray-400\">{col.default_expression || '-'}</td>\n\t\t\t\t\t\t\t\t\t\t\t<td class=\"px-4 py-2\">\n\t\t\t\t\t\t\t\t\t\t\t\t{#if col.tags?.length}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<div class=\"flex flex-wrap gap-1\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{#each col.tags as tag}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium border border-orange-200 bg-orange-100 text-orange-900 dark:border-orange-700/60 dark:bg-orange-500/15 
dark:text-orange-200\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t{tag}\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t</span>\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t\t\t\t\t\t\t<span class=\"text-gray-400\">-</span>\n\t\t\t\t\t\t\t\t\t\t\t\t{/if}\n\t\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t\t</table>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{:else}\n\t\t\t\t\t\t<p class=\"px-4 py-8 text-sm text-center text-gray-500 dark:text-gray-400\">No column metadata available.</p>\n\t\t\t\t\t{/if}\n\t\t\t\t</div>\n\n\t\t\t\t<div class=\"ds-card overflow-hidden\">\n\t\t\t\t\t<div class=\"px-4 py-3 border-b border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t<h4 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Governance Notes</h4>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"p-4 space-y-3\">\n\t\t\t\t\t\t<div class=\"flex gap-2\">\n\t\t\t\t\t\t\t<textarea\n\t\t\t\t\t\t\t\tclass=\"ds-textarea flex-1\"\n\t\t\t\t\t\t\t\trows=\"2\"\n\t\t\t\t\t\t\t\tplaceholder=\"Add governance note for this table (owners, SLA, sensitivity, remediation steps)...\"\n\t\t\t\t\t\t\t\tbind:value={tableNoteDraft}\n\t\t\t\t\t\t\t></textarea>\n\t\t\t\t\t\t\t<button class=\"ds-btn-primary px-3 py-2 h-fit\" onclick={() => addTableNote()}>\n\t\t\t\t\t\t\t\t<Plus class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\tAdd\n\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t{#if tableNotes.length > 0}\n\t\t\t\t\t\t\t<div class=\"space-y-2 max-h-64 overflow-auto\">\n\t\t\t\t\t\t\t\t{#each tableNotes as note}\n\t\t\t\t\t\t\t\t\t<div class=\"rounded-lg border border-gray-200 dark:border-gray-800 p-3 bg-white dark:bg-gray-950\">\n\t\t\t\t\t\t\t\t\t\t<div class=\"flex items-start justify-between gap-3\">\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"min-w-0\">\n\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-800 dark:text-gray-200 
whitespace-pre-wrap\">{note.comment_text}</p>\n\t\t\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500 dark:text-gray-400 mt-1\">\n\t\t\t\t\t\t\t\t\t\t\t\t\t{note.created_by || 'unknown'} · {formatTime(note.created_at)}\n\t\t\t\t\t\t\t\t\t\t\t\t</p>\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t\t<button class=\"ds-icon-btn hover:text-red-500\" title=\"Delete note\" onclick={() => deleteTableNote(note.id)}>\n\t\t\t\t\t\t\t\t\t\t\t\t<Trash2 class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No table notes yet.</p>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t{/if}\n\t</Sheet>\n\n\t<Sheet\n\t\topen={queryDetailSheetOpen}\n\t\ttitle={selectedQuery ? `Query Details · ${selectedQuery.ch_user}` : 'Query Details'}\n\t\tsize=\"xl\"\n\t\tonclose={closeQueryDetails}\n\t>\n\t\t{#if selectedQuery}\n\t\t\t<div class=\"space-y-5\">\n\t\t\t\t<div class=\"grid grid-cols-2 lg:grid-cols-4 gap-3\">\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Kind</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{selectedQuery.query_kind}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Duration</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{selectedQuery.duration_ms} ms</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Read Rows</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white 
mt-1\">{selectedQuery.read_rows.toLocaleString()}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Timestamp</p>\n\t\t\t\t\t\t<p class=\"text-sm font-medium text-gray-900 dark:text-white mt-1\">{formatTime(selectedQuery.event_time)}</p>\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\n\t\t\t\t<div class=\"ds-card overflow-hidden\">\n\t\t\t\t\t<div class=\"px-4 py-3 border-b border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t<h4 class=\"text-sm font-semibold text-gray-900 dark:text-white\">SQL</h4>\n\t\t\t\t\t</div>\n\t\t\t\t\t<pre class=\"p-4 text-xs font-mono text-gray-800 dark:text-gray-200 bg-white dark:bg-gray-950 overflow-x-auto\">{selectedQuery.query_text}</pre>\n\t\t\t\t</div>\n\n\t\t\t\t{#if selectedQuery.error_message}\n\t\t\t\t\t<div class=\"rounded-lg border border-red-200 dark:border-red-800 bg-red-50 dark:bg-red-900/20 p-4\">\n\t\t\t\t\t\t<p class=\"text-xs font-medium uppercase tracking-wide text-red-700 dark:text-red-300 mb-1\">Error</p>\n\t\t\t\t\t\t<p class=\"text-sm text-red-800 dark:text-red-200\">{selectedQuery.error_message}</p>\n\t\t\t\t\t</div>\n\t\t\t\t{/if}\n\t\t\t</div>\n\t\t{/if}\n\t</Sheet>\n\n\t<Sheet\n\t\topen={accessDetailSheetOpen}\n\t\ttitle={selectedOverPermissionGroup\n\t\t\t? `Permission Details · ${selectedOverPermissionGroup.userName}`\n\t\t\t: selectedOverPermission\n\t\t\t\t? 
`Permission Details · ${selectedOverPermission.user_name}`\n\t\t\t\t: 'Permission Details'}\n\t\tsize=\"md\"\n\t\tonclose={closeAccessDetails}\n\t>\n\t\t{#if selectedOverPermissionGroup}\n\t\t\t<div class=\"space-y-4\">\n\t\t\t\t<div class=\"grid grid-cols-3 gap-2\">\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Total Grants</p>\n\t\t\t\t\t\t<p class=\"text-base font-semibold text-gray-900 dark:text-white mt-1\">{selectedOverPermissionGroup.total}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Databases</p>\n\t\t\t\t\t\t<p class=\"text-base font-semibold text-gray-900 dark:text-white mt-1\">{selectedOverPermissionGroup.databases}</p>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"ds-panel-muted p-3\">\n\t\t\t\t\t\t<p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Top Severity</p>\n\t\t\t\t\t\t<p class=\"text-base font-semibold text-gray-900 dark:text-white mt-1\">{selectedOverPermissionGroup.topSeverity}</p>\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\n\t\t\t\t<div class=\"ds-card overflow-hidden\">\n\t\t\t\t\t<div class=\"px-3 py-2 border-b border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t<h4 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Risky Grants</h4>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"max-h-[56vh] overflow-auto\">\n\t\t\t\t\t\t<table class=\"ds-table text-xs\">\n\t\t\t\t\t\t\t<thead>\n\t\t\t\t\t\t\t\t<tr class=\"ds-table-head-row\">\n\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-compact\">Database</th>\n\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-compact\">Privilege</th>\n\t\t\t\t\t\t\t\t\t<th class=\"ds-table-th-compact\">Reason</th>\n\t\t\t\t\t\t\t\t\t<th 
class=\"ds-table-th-right-compact\">Details</th>\n\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t</thead>\n\t\t\t\t\t\t\t<tbody>\n\t\t\t\t\t\t\t\t{#each selectedOverPermissionGroup.alerts as alert}\n\t\t\t\t\t\t\t\t\t<tr class=\"ds-table-row\">\n\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-compact-strong\">{alert.database_name || '*'}</td>\n\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-compact text-gray-700 dark:text-gray-300\">{alert.privilege}</td>\n\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-compact\">{truncate(alert.reason, 80)}</td>\n\t\t\t\t\t\t\t\t\t\t<td class=\"ds-td-right\">\n\t\t\t\t\t\t\t\t\t\t\t<button\n\t\t\t\t\t\t\t\t\t\t\t\tclass=\"ds-btn-outline px-2 py-0.5\"\n\t\t\t\t\t\t\t\t\t\t\t\tonclick={() => openAccessDetails(alert)}\n\t\t\t\t\t\t\t\t\t\t\t>\n\t\t\t\t\t\t\t\t\t\t\t\tOpen\n\t\t\t\t\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t\t\t\t\t</td>\n\t\t\t\t\t\t\t\t\t</tr>\n\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t</tbody>\n\t\t\t\t\t\t</table>\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t{:else if selectedOverPermission}\n\t\t\t<div class=\"space-y-4\">\n\t\t\t\t<div class=\"ds-panel-muted p-4\">\n\t\t\t\t\t<p class=\"text-xs uppercase tracking-wide text-gray-500 dark:text-gray-400 mb-2\">Grant Scope</p>\n\t\t\t\t\t<p class=\"text-sm text-gray-900 dark:text-white\">\n\t\t\t\t\t\t{selectedOverPermission.user_name} · {selectedOverPermission.database_name || '*'}.\n\t\t\t\t\t\t{selectedOverPermission.table_name || '*'} · {selectedOverPermission.privilege}\n\t\t\t\t\t</p>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"ds-panel-muted p-4\">\n\t\t\t\t\t<p class=\"text-xs uppercase tracking-wide text-gray-500 dark:text-gray-400 mb-2\">Reason</p>\n\t\t\t\t\t<p class=\"text-sm text-gray-700 dark:text-gray-300\">{selectedOverPermission.reason}</p>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"ds-panel-muted p-4\">\n\t\t\t\t\t<p class=\"text-xs uppercase tracking-wide text-gray-500 dark:text-gray-400 mb-2\">Last Query Activity</p>\n\t\t\t\t\t<p class=\"text-sm text-gray-700 
dark:text-gray-300\">\n\t\t\t\t\t\t{selectedOverPermission.last_query_time\n\t\t\t\t\t\t\t? `${formatTime(selectedOverPermission.last_query_time)} (${selectedOverPermission.days_since_query ?? 0} days ago)`\n\t\t\t\t\t\t\t: 'No query usage found'}\n\t\t\t\t\t</p>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t{/if}\n\t</Sheet>\n\n\t<Sheet\n\t\topen={incidentCreateSheetOpen}\n\t\ttitle=\"Create Incident\"\n\t\tsize=\"lg\"\n\t\tonclose={() => incidentCreateSheetOpen = false}\n\t>\n\t\t<div class=\"space-y-3\">\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t<div>\n\t\t\t\t\t<div class=\"ds-form-label\">Title</div>\n\t\t\t\t\t<input class=\"ds-input\" bind:value={incidentForm.title} placeholder=\"Incident title\" />\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<div class=\"ds-form-label\">Assignee</div>\n\t\t\t\t\t<input class=\"ds-input\" bind:value={incidentForm.assignee} placeholder=\"Optional assignee\" />\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t<div>\n\t\t\t\t\t<div class=\"ds-form-label\">Severity</div>\n\t\t\t\t\t<Combobox\n\t\t\t\t\t\toptions={incidentSeverityOptions.filter((o) => o.value)}\n\t\t\t\t\t\tvalue={incidentForm.severity}\n\t\t\t\t\t\tonChange={(v) => incidentForm.severity = v}\n\t\t\t\t\t\tplaceholder=\"Severity\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<div class=\"ds-form-label\">Status</div>\n\t\t\t\t\t<Combobox\n\t\t\t\t\t\toptions={incidentStatusOptions.filter((o) => o.value)}\n\t\t\t\t\t\tvalue={incidentForm.status}\n\t\t\t\t\t\tonChange={(v) => incidentForm.status = v}\n\t\t\t\t\t\tplaceholder=\"Status\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t\t<div>\n\t\t\t\t<div class=\"ds-form-label\">Details</div>\n\t\t\t\t<textarea class=\"ds-textarea\" rows=\"4\" bind:value={incidentForm.details} placeholder=\"What happened and what action is needed\"></textarea>\n\t\t\t</div>\n\t\t\t<div class=\"flex justify-end\">\n\t\t\t\t<button class=\"ds-btn-primary px-3 
py-2\" onclick={() => createManualIncident()}>Create Incident</button>\n\t\t\t</div>\n\t\t</div>\n\t</Sheet>\n\n\t<Sheet\n\t\topen={incidentDetailSheetOpen}\n\t\ttitle={selectedIncident ? `Incident · ${selectedIncident.title}` : 'Incident'}\n\t\tsize=\"lg\"\n\t\tonclose={closeIncidentDetails}\n\t>\n\t\t{#if selectedIncident}\n\t\t\t<div class=\"space-y-4\">\n\t\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t\t<div>\n\t\t\t\t\t\t<div class=\"ds-form-label\">Title</div>\n\t\t\t\t\t\t<input class=\"ds-input\" bind:value={selectedIncident.title} />\n\t\t\t\t\t</div>\n\t\t\t\t\t<div>\n\t\t\t\t\t\t<div class=\"ds-form-label\">Assignee</div>\n\t\t\t\t\t\t<input\n\t\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t\t\tvalue={selectedIncident.assignee ?? ''}\n\t\t\t\t\t\t\toninput={(e) => selectedIncident && (selectedIncident.assignee = (e.currentTarget as HTMLInputElement).value)}\n\t\t\t\t\t\t/>\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t\t<div>\n\t\t\t\t\t\t<div class=\"ds-form-label\">Severity</div>\n\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\toptions={incidentSeverityOptions.filter((o) => o.value)}\n\t\t\t\t\t\t\tvalue={selectedIncident.severity}\n\t\t\t\t\t\t\tonChange={(v) => selectedIncident && (selectedIncident.severity = v)}\n\t\t\t\t\t\t\tplaceholder=\"Severity\"\n\t\t\t\t\t\t/>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div>\n\t\t\t\t\t\t<div class=\"ds-form-label\">Status</div>\n\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\toptions={incidentStatusOptions.filter((o) => o.value)}\n\t\t\t\t\t\t\tvalue={selectedIncident.status}\n\t\t\t\t\t\t\tonChange={(v) => selectedIncident && (selectedIncident.status = v)}\n\t\t\t\t\t\t\tplaceholder=\"Status\"\n\t\t\t\t\t\t/>\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<div class=\"ds-form-label\">Details</div>\n\t\t\t\t\t<textarea\n\t\t\t\t\t\tclass=\"ds-textarea\"\n\t\t\t\t\t\trows=\"3\"\n\t\t\t\t\t\tvalue={selectedIncident.details ?? 
''}\n\t\t\t\t\t\toninput={(e) => selectedIncident && (selectedIncident.details = (e.currentTarget as HTMLTextAreaElement).value)}\n\t\t\t\t\t></textarea>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<div class=\"ds-form-label\">Resolution Note</div>\n\t\t\t\t\t<textarea\n\t\t\t\t\t\tclass=\"ds-textarea\"\n\t\t\t\t\t\trows=\"2\"\n\t\t\t\t\t\tvalue={selectedIncident.resolution_note ?? ''}\n\t\t\t\t\t\toninput={(e) => selectedIncident && (selectedIncident.resolution_note = (e.currentTarget as HTMLTextAreaElement).value)}\n\t\t\t\t\t></textarea>\n\t\t\t\t</div>\n\t\t\t\t<div class=\"flex justify-end\">\n\t\t\t\t\t<button class=\"ds-btn-primary px-3 py-2\" onclick={() => saveIncidentUpdates()}>Save Incident</button>\n\t\t\t\t</div>\n\n\t\t\t\t<div class=\"ds-card overflow-hidden\">\n\t\t\t\t\t<div class=\"px-4 py-3 border-b border-gray-200 dark:border-gray-800 bg-gray-50 dark:bg-gray-900\">\n\t\t\t\t\t\t<h4 class=\"text-sm font-semibold text-gray-900 dark:text-white\">Comments</h4>\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"p-4 space-y-3\">\n\t\t\t\t\t\t<div class=\"flex gap-2\">\n\t\t\t\t\t\t\t<textarea class=\"ds-textarea flex-1\" rows=\"2\" bind:value={incidentCommentDraft} placeholder=\"Add incident comment\"></textarea>\n\t\t\t\t\t\t\t<button class=\"ds-btn-primary px-3 py-2 h-fit\" onclick={() => addIncidentComment()}>\n\t\t\t\t\t\t\t\t<Plus class=\"w-3.5 h-3.5\" />\n\t\t\t\t\t\t\t\tAdd\n\t\t\t\t\t\t\t</button>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{#if incidentComments.length > 0}\n\t\t\t\t\t\t\t<div class=\"space-y-2 max-h-72 overflow-auto\">\n\t\t\t\t\t\t\t\t{#each incidentComments as comment}\n\t\t\t\t\t\t\t\t\t<div class=\"rounded-lg border border-gray-200 dark:border-gray-800 p-3 bg-white dark:bg-gray-950\">\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-sm text-gray-800 dark:text-gray-200 whitespace-pre-wrap\">{comment.comment_text}</p>\n\t\t\t\t\t\t\t\t\t\t<p class=\"text-xs text-gray-500 dark:text-gray-400 mt-1\">{comment.created_by || 'unknown'} · 
{formatTime(comment.created_at)}</p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t{/each}\n\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t{:else}\n\t\t\t\t\t\t\t<p class=\"text-sm text-gray-500 dark:text-gray-400\">No comments yet.</p>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t</div>\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t{/if}\n\t</Sheet>\n\n\t<Sheet\n\t\topen={showPolicyForm}\n\t\ttitle={editingPolicy ? 'Edit Policy' : 'Create Policy'}\n\t\tsize=\"lg\"\n\t\tonclose={closePolicyForm}\n\t>\n\t\t<form\n\t\t\tonsubmit={(e) => {\n\t\t\t\te.preventDefault();\n\t\t\t\thandlePolicySubmit();\n\t\t\t}}\n\t\t\tclass=\"space-y-4\"\n\t\t>\n\t\t\t<div>\n\t\t\t\t<label for=\"policy-name\" class=\"ds-form-label\">Name</label>\n\t\t\t\t<input\n\t\t\t\t\tid=\"policy-name\"\n\t\t\t\t\ttype=\"text\"\n\t\t\t\t\tbind:value={policyForm.name}\n\t\t\t\t\trequired\n\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t/>\n\t\t\t</div>\n\n\t\t\t<div>\n\t\t\t\t<label for=\"policy-description\" class=\"ds-form-label\">Description</label>\n\t\t\t\t\t<textarea\n\t\t\t\t\t\tid=\"policy-description\"\n\t\t\t\t\t\tbind:value={policyForm.description}\n\t\t\t\t\t\trows=\"3\"\n\t\t\t\t\t\tclass=\"ds-textarea\"\n\t\t\t\t\t></textarea>\n\t\t\t</div>\n\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-4\">\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-object-type\" class=\"ds-form-label\">Object Type</label>\n\t\t\t\t\t<Combobox\n\t\t\t\t\t\toptions={policyObjectTypeOptions}\n\t\t\t\t\t\tvalue={policyForm.object_type}\n\t\t\t\t\t\tonChange={(v) => policyForm.object_type = v as Policy['object_type']}\n\t\t\t\t\t\tplaceholder=\"Object Type\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-required-role\" class=\"ds-form-label\">Required Role</label>\n\t\t\t\t\t<input\n\t\t\t\t\t\tid=\"policy-required-role\"\n\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\tbind:value={policyForm.required_role}\n\t\t\t\t\t\tplaceholder=\"e.g. 
analyst\"\n\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t</div>\n\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-3 gap-4\">\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-database\" class=\"ds-form-label\">Database</label>\n\t\t\t\t\t<input\n\t\t\t\t\t\tid=\"policy-database\"\n\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\tbind:value={policyForm.object_database}\n\t\t\t\t\t\tplaceholder=\"Optional\"\n\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-table\" class=\"ds-form-label\">Table</label>\n\t\t\t\t\t<input\n\t\t\t\t\t\tid=\"policy-table\"\n\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\tbind:value={policyForm.object_table}\n\t\t\t\t\t\tplaceholder=\"Optional\"\n\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-column\" class=\"ds-form-label\">Column</label>\n\t\t\t\t\t<input\n\t\t\t\t\t\tid=\"policy-column\"\n\t\t\t\t\t\ttype=\"text\"\n\t\t\t\t\t\tbind:value={policyForm.object_column}\n\t\t\t\t\t\tplaceholder=\"Optional\"\n\t\t\t\t\t\tclass=\"ds-input\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t</div>\n\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-3 gap-4\">\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-severity\" class=\"ds-form-label\">Severity</label>\n\t\t\t\t\t<Combobox\n\t\t\t\t\t\toptions={policySeverityOptions}\n\t\t\t\t\t\tvalue={policyForm.severity}\n\t\t\t\t\t\tonChange={(v) => policyForm.severity = v}\n\t\t\t\t\t\tplaceholder=\"Severity\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t\t<div>\n\t\t\t\t\t<label for=\"policy-enforcement-mode\" class=\"ds-form-label\">Mode</label>\n\t\t\t\t\t<Combobox\n\t\t\t\t\t\toptions={policyEnforcementModeOptions}\n\t\t\t\t\t\tvalue={policyForm.enforcement_mode}\n\t\t\t\t\t\tonChange={(v) => policyForm.enforcement_mode = v as Policy['enforcement_mode']}\n\t\t\t\t\t\tplaceholder=\"Mode\"\n\t\t\t\t\t/>\n\t\t\t\t</div>\n\t\t\t\t<label class=\"ds-panel-muted flex items-center gap-2 px-3 py-2 
mt-6\">\n\t\t\t\t\t<input\n\t\t\t\t\t\ttype=\"checkbox\"\n\t\t\t\t\t\tid=\"policy-enabled\"\n\t\t\t\t\t\tbind:checked={policyForm.enabled}\n\t\t\t\t\t\tclass=\"ds-checkbox\"\n\t\t\t\t\t/>\n\t\t\t\t\t<span class=\"text-sm text-gray-700 dark:text-gray-300\">Enabled</span>\n\t\t\t\t</label>\n\t\t\t</div>\n\n\t\t\t<div class=\"flex justify-end gap-3 pt-2\">\n\t\t\t\t<button\n\t\t\t\t\ttype=\"button\"\n\t\t\t\t\tonclick={closePolicyForm}\n\t\t\t\t\tclass=\"ds-btn-outline px-4 py-2\"\n\t\t\t\t>\n\t\t\t\t\tCancel\n\t\t\t\t</button>\n\t\t\t\t<button\n\t\t\t\t\ttype=\"submit\"\n\t\t\t\t\tdisabled={loading}\n\t\t\t\t\tclass=\"ds-btn-primary px-4 py-2 disabled:opacity-50 disabled:cursor-not-allowed\"\n\t\t\t\t>\n\t\t\t\t\t{loading ? 'Saving...' : editingPolicy ? 'Update Policy' : 'Create Policy'}\n\t\t\t\t</button>\n\t\t\t</div>\n\t\t</form>\n\t</Sheet>\n\n<ConfirmDialog\n\topen={confirmPolicyDeleteOpen}\n\ttitle=\"Delete policy?\"\n\tdescription={pendingPolicyDelete ? `Delete \"${pendingPolicyDelete.name}\"? This action cannot be undone.` : 'This action cannot be undone.'}\n\tconfirmLabel=\"Delete\"\n\tdestructive={true}\n\tloading={confirmPolicyDeleteLoading}\n\tonconfirm={confirmDeletePolicy}\n\toncancel={cancelDeletePolicy}\n/>\n\n<Sheet\n\topen={channelSheetOpen}\n\ttitle=\"Create Alert Channel\"\n\tsize=\"xl\"\n\tonclose={() => channelSheetOpen = false}\n>\n\t<form\n\t\tclass=\"space-y-4\"\n\t\tonsubmit={(e) => {\n\t\t\te.preventDefault();\n\t\t\tvoid createAlertChannelRecord();\n\t\t}}\n\t>\n\t\t<div class=\"flex items-center gap-2\">\n\t\t\t<p class=\"text-xs text-gray-500\">Channels hold delivery credentials for alert notifications.</p>\n\t\t\t<HelpTip text=\"Use SMTP for generic email relay, Resend/Brevo for API delivery. 
Add test recipients above in the main panel to validate setup quickly.\" />\n\t\t</div>\n\n\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Channel Name</span>\n\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"Ops SMTP\" bind:value={channelForm.name} required />\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Channel Type</span>\n\t\t\t\t<Combobox\n\t\t\t\t\toptions={alertChannelTypeOptions}\n\t\t\t\t\tvalue={channelForm.channel_type}\n\t\t\t\t\tonChange={(v) => channelForm = { ...channelForm, channel_type: v as AlertChannelType }}\n\t\t\t\t/>\n\t\t\t</label>\n\t\t</div>\n\n\t\t{#if channelForm.channel_type === 'smtp'}\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">SMTP Host</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"smtp.sendgrid.net\" bind:value={channelForm.smtp_host} required />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">SMTP Port</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" type=\"number\" min=\"1\" max=\"65535\" bind:value={channelForm.smtp_port} />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">Username</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" bind:value={channelForm.smtp_username} />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">Password</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" type=\"password\" bind:value={channelForm.smtp_password} />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">From Email</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" type=\"email\" placeholder=\"alerts@company.com\" bind:value={channelForm.smtp_from_email} required 
/>\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">From Name</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"CH-UI Alerts\" bind:value={channelForm.smtp_from_name} />\n\t\t\t\t</label>\n\t\t\t</div>\n\t\t\t<div class=\"flex flex-wrap items-center gap-4\">\n\t\t\t\t<label class=\"ds-checkbox-label text-xs\">\n\t\t\t\t\t<input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={channelForm.smtp_use_tls} />\n\t\t\t\t\tTLS\n\t\t\t\t</label>\n\t\t\t\t<label class=\"ds-checkbox-label text-xs\">\n\t\t\t\t\t<input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={channelForm.smtp_starttls} />\n\t\t\t\t\tSTARTTLS\n\t\t\t\t</label>\n\t\t\t</div>\n\t\t{:else}\n\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t<label class=\"space-y-1 md:col-span-2\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">API Key</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" type=\"password\" bind:value={channelForm.api_key} required />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">From Email</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" type=\"email\" placeholder=\"alerts@company.com\" bind:value={channelForm.api_from_email} required />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">From Name</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"CH-UI Alerts\" bind:value={channelForm.api_from_name} />\n\t\t\t\t</label>\n\t\t\t\t<label class=\"space-y-1 md:col-span-2\">\n\t\t\t\t\t<span class=\"text-xs text-gray-500\">Base URL (optional)</span>\n\t\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"Leave empty for provider default\" bind:value={channelForm.api_base_url} />\n\t\t\t\t</label>\n\t\t\t</div>\n\t\t{/if}\n\n\t\t<label class=\"ds-checkbox-label text-xs\">\n\t\t\t<input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={channelForm.is_active} 
/>\n\t\t\tActive\n\t\t</label>\n\n\t\t<div class=\"flex items-center justify-end gap-2 pt-2 border-t border-gray-200 dark:border-gray-800\">\n\t\t\t<button type=\"button\" class=\"ds-btn-outline\" onclick={() => channelSheetOpen = false}>Cancel</button>\n\t\t\t<button type=\"submit\" class=\"ds-btn-primary\" disabled={!channelForm.name.trim()}>Create Channel</button>\n\t\t</div>\n\t</form>\n</Sheet>\n\n<Sheet\n\topen={ruleSheetOpen}\n\ttitle=\"Create Alert Rule\"\n\tsize=\"xl\"\n\tonclose={() => ruleSheetOpen = false}\n>\n\t<form\n\t\tclass=\"space-y-4\"\n\t\tonsubmit={(e) => {\n\t\t\te.preventDefault();\n\t\t\tvoid createAlertRuleRecord();\n\t\t}}\n\t>\n\t\t<div class=\"flex items-center gap-2\">\n\t\t\t<p class=\"text-xs text-gray-500\">Rules map governance/system events to delivery routes and escalation behavior.</p>\n\t\t\t<HelpTip text=\"Each route must include a channel and recipients. You can mix immediate and digest routes under the same rule.\" />\n\t\t</div>\n\n\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Rule Name</span>\n\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"Critical policy violations\" bind:value={ruleForm.name} required />\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Event Type</span>\n\t\t\t\t<Combobox\n\t\t\t\t\toptions={alertEventTypeOptions}\n\t\t\t\t\tvalue={ruleForm.event_type}\n\t\t\t\t\tonChange={(v) => ruleForm = { ...ruleForm, event_type: v }}\n\t\t\t\t/>\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Minimum Severity</span>\n\t\t\t\t<Combobox\n\t\t\t\t\toptions={alertSeverityOptions}\n\t\t\t\t\tvalue={ruleForm.severity_min}\n\t\t\t\t\tonChange={(v) => ruleForm = { ...ruleForm, severity_min: v }}\n\t\t\t\t/>\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Cooldown 
(seconds)</span>\n\t\t\t\t<input class=\"ds-input-sm\" type=\"number\" min=\"0\" bind:value={ruleForm.cooldown_seconds} />\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Max Attempts</span>\n\t\t\t\t<input class=\"ds-input-sm\" type=\"number\" min=\"1\" bind:value={ruleForm.max_attempts} />\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Subject Template</span>\n\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"Optional\" bind:value={ruleForm.subject_template} />\n\t\t\t</label>\n\t\t\t<label class=\"space-y-1 md:col-span-2\">\n\t\t\t\t<span class=\"text-xs text-gray-500\">Body Template</span>\n\t\t\t\t<input class=\"ds-input-sm\" placeholder=\"Optional\" bind:value={ruleForm.body_template} />\n\t\t\t</label>\n\t\t</div>\n\n\t\t<label class=\"ds-checkbox-label text-xs\">\n\t\t\t<input type=\"checkbox\" class=\"ds-checkbox\" bind:checked={ruleForm.enabled} />\n\t\t\tEnabled\n\t\t</label>\n\n\t\t<div class=\"space-y-2 border-t border-gray-200 dark:border-gray-800 pt-3\">\n\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t<h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Routes</h3>\n\t\t\t\t<button type=\"button\" class=\"ds-btn-outline\" onclick={() => addRuleRouteDraft()}>Add Route</button>\n\t\t\t</div>\n\n\t\t\t{#if alertChannels.length === 0}\n\t\t\t\t<p class=\"text-xs text-amber-600 dark:text-amber-400\">Create at least one alert channel before adding routes.</p>\n\t\t\t{/if}\n\n\t\t\t{#each ruleRoutesDraft as route, idx}\n\t\t\t\t<div class=\"ds-panel-muted p-3 space-y-3\">\n\t\t\t\t\t<div class=\"flex items-center justify-between\">\n\t\t\t\t\t\t<p class=\"text-xs font-semibold text-gray-700 dark:text-gray-300\">Route {idx + 1}</p>\n\t\t\t\t\t\t{#if ruleRoutesDraft.length > 1}\n\t\t\t\t\t\t\t<button type=\"button\" class=\"text-xs text-red-500 hover:text-red-700\" onclick={() => 
removeRuleRouteDraft(idx)}>Remove</button>\n\t\t\t\t\t\t{/if}\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"grid grid-cols-1 md:grid-cols-2 gap-3\">\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Channel</span>\n\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\toptions={alertChannelOptions()}\n\t\t\t\t\t\t\t\tvalue={route.channel_id}\n\t\t\t\t\t\t\t\tonChange={(v) => updateRuleRouteDraft(idx, { channel_id: v })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Recipients (comma-separated)</span>\n\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\tclass=\"ds-input-sm\"\n\t\t\t\t\t\t\t\tplaceholder=\"ops@company.com, data@company.com\"\n\t\t\t\t\t\t\t\tvalue={route.recipients}\n\t\t\t\t\t\t\t\toninput={(e) => updateRuleRouteDraft(idx, { recipients: (e.target as HTMLInputElement).value })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Delivery Mode</span>\n\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\toptions={routeDeliveryModeOptions}\n\t\t\t\t\t\t\t\tvalue={route.delivery_mode}\n\t\t\t\t\t\t\t\tonChange={(v) => updateRuleRouteDraft(idx, { delivery_mode: v as 'immediate' | 'digest' })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Digest Window (minutes)</span>\n\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\tclass=\"ds-input-sm\"\n\t\t\t\t\t\t\t\ttype=\"number\"\n\t\t\t\t\t\t\t\tmin=\"1\"\n\t\t\t\t\t\t\t\tdisabled={route.delivery_mode !== 'digest'}\n\t\t\t\t\t\t\t\tvalue={route.digest_window_minutes}\n\t\t\t\t\t\t\t\toninput={(e) => updateRuleRouteDraft(idx, { digest_window_minutes: Number((e.target as HTMLInputElement).value) || 15 })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Escalation 
Channel</span>\n\t\t\t\t\t\t\t<Combobox\n\t\t\t\t\t\t\t\toptions={alertChannelOptions()}\n\t\t\t\t\t\t\t\tvalue={route.escalation_channel_id}\n\t\t\t\t\t\t\t\tonChange={(v) => updateRuleRouteDraft(idx, { escalation_channel_id: v })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Escalation Recipients</span>\n\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\tclass=\"ds-input-sm\"\n\t\t\t\t\t\t\t\tplaceholder=\"manager@company.com\"\n\t\t\t\t\t\t\t\tvalue={route.escalation_recipients}\n\t\t\t\t\t\t\t\toninput={(e) => updateRuleRouteDraft(idx, { escalation_recipients: (e.target as HTMLInputElement).value })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t\t<label class=\"space-y-1\">\n\t\t\t\t\t\t\t<span class=\"text-xs text-gray-500\">Escalate After Failures</span>\n\t\t\t\t\t\t\t<input\n\t\t\t\t\t\t\t\tclass=\"ds-input-sm\"\n\t\t\t\t\t\t\t\ttype=\"number\"\n\t\t\t\t\t\t\t\tmin=\"0\"\n\t\t\t\t\t\t\t\tvalue={route.escalation_after_failures}\n\t\t\t\t\t\t\t\toninput={(e) => updateRuleRouteDraft(idx, { escalation_after_failures: Number((e.target as HTMLInputElement).value) || 0 })}\n\t\t\t\t\t\t\t/>\n\t\t\t\t\t\t</label>\n\t\t\t\t\t</div>\n\t\t\t\t\t<label class=\"ds-checkbox-label text-xs\">\n\t\t\t\t\t\t<input\n\t\t\t\t\t\t\ttype=\"checkbox\"\n\t\t\t\t\t\t\tclass=\"ds-checkbox\"\n\t\t\t\t\t\t\tchecked={route.is_active}\n\t\t\t\t\t\t\tonchange={(e) => updateRuleRouteDraft(idx, { is_active: (e.target as HTMLInputElement).checked })}\n\t\t\t\t\t\t/>\n\t\t\t\t\t\tRoute active\n\t\t\t\t\t</label>\n\t\t\t\t</div>\n\t\t\t{/each}\n\t\t</div>\n\n\t\t<div class=\"flex items-center justify-end gap-2 pt-2 border-t border-gray-200 dark:border-gray-800\">\n\t\t\t<button type=\"button\" class=\"ds-btn-outline\" onclick={() => ruleSheetOpen = false}>Cancel</button>\n\t\t\t<button type=\"submit\" class=\"ds-btn-primary\" disabled={!ruleForm.name.trim()}>Create 
Rule</button>\n\t\t</div>\n\t</form>\n</Sheet>\n\n<ConfirmDialog\n\topen={deletingNoteId !== null}\n\ttitle=\"Delete note?\"\n\tdescription=\"Are you sure you want to delete this note? This cannot be undone.\"\n\tconfirmLabel=\"Delete\"\n\tdestructive\n\tonconfirm={confirmDeleteNote}\n\toncancel={() => deletingNoteId = null}\n/>\n\n<ConfirmDialog\n\topen={deletingChannel !== null}\n\ttitle=\"Delete channel?\"\n\tdescription={deletingChannel ? `Delete \"${deletingChannel.name}\"? This cannot be undone.` : ''}\n\tconfirmLabel=\"Delete\"\n\tdestructive\n\tonconfirm={confirmDeleteChannel}\n\toncancel={() => deletingChannel = null}\n/>\n\n<ConfirmDialog\n\topen={deletingRule !== null}\n\ttitle=\"Delete rule?\"\n\tdescription={deletingRule ? `Delete \"${deletingRule.name}\"? This cannot be undone.` : ''}\n\tconfirmLabel=\"Delete\"\n\tdestructive\n\tonconfirm={confirmDeleteRule}\n\toncancel={() => deletingRule = null}\n/>\n\n<ConfirmDialog\n\topen={showEnableConfirm}\n\ttitle=\"Enable governance sync?\"\n\tdescription=\"The syncer will poll your ClickHouse cluster every 5 minutes, borrowing credentials from your admin session. Each borrow is recorded in the audit log. Make sure you've reviewed the disclosure on this page.\"\n\tconfirmLabel=\"I understand, enable\"\n\tloading={govToggleSaving}\n\tonconfirm={confirmEnableGovernanceSync}\n\toncancel={() => showEnableConfirm = false}\n/>\n\n<ConfirmDialog\n\topen={showDisableConfirm}\n\ttitle=\"Disable governance sync?\"\n\tdescription=\"The syncer will stop immediately. Existing collected data is preserved and remains visible in the UI.\"\n\tconfirmLabel=\"Disable\"\n\tdestructive\n\tloading={govToggleSaving}\n\tonconfirm={confirmDisableGovernanceSync}\n\toncancel={() => showDisableConfirm = false}\n/>\n"
  },
  {
    "path": "ui/src/pages/Home.svelte",
    "content": "<script lang=\"ts\">\n  import {\n    ArrowUpRight,\n    BookOpen,\n    Bookmark,\n    Brain,\n    Clock3,\n    Home,\n    LayoutDashboard,\n    Shield,\n    SquareTerminal,\n    Table2,\n    Workflow,\n  } from 'lucide-svelte'\n  import { getSession } from '../lib/stores/session.svelte'\n  import { getTabs, openDashboardTab, openQueryTab, openSingletonTab } from '../lib/stores/tabs.svelte'\n  import type { Tab } from '../lib/stores/tabs.svelte'\n\n  interface QuickLink {\n    id: string\n    title: string\n    description: string\n    icon: typeof SquareTerminal\n    run: () => void\n  }\n\n  interface ResourceLink {\n    id: string\n    title: string\n    description: string\n    href: string\n  }\n\n  const session = $derived(getSession())\n  const tabs = $derived(getTabs())\n\n  const recentTabs = $derived.by(() =>\n    tabs.filter((tab) => tab.type !== 'home').slice(-8).reverse(),\n  )\n\n  const quickLinks: QuickLink[] = [\n    {\n      id: 'new-query',\n      title: 'New Query',\n      description: 'Write and run SQL',\n      icon: SquareTerminal,\n      run: () => openQueryTab(),\n    },\n    {\n      id: 'saved-queries',\n      title: 'Saved Queries',\n      description: 'Browse and manage saved SQL',\n      icon: Bookmark,\n      run: () => openSingletonTab('saved-queries', 'Saved Queries'),\n    },\n    {\n      id: 'schedules',\n      title: 'Schedules',\n      description: 'Manage cron jobs and run history',\n      icon: Clock3,\n      run: () => openSingletonTab('schedules', 'Schedules'),\n    },\n    {\n      id: 'dashboards',\n      title: 'Dashboards',\n      description: 'Open visual dashboards',\n      icon: LayoutDashboard,\n      run: () => openSingletonTab('dashboards', 'Dashboards'),\n    },\n    {\n      id: 'brain',\n      title: 'Brain',\n      description: 'AI assistant for ClickHouse workflows',\n      icon: Brain,\n      run: () => openSingletonTab('brain', 'Brain'),\n    },\n    {\n      id: 'pipelines',\n      title: 
'Pipelines',\n      description: 'Visual data pipeline builder',\n      icon: Workflow,\n      run: () => openSingletonTab('pipelines', 'Pipelines'),\n    },\n    {\n      id: 'admin',\n      title: 'Admin',\n      description: 'Users, alerts, and audit controls',\n      icon: Shield,\n      run: () => openSingletonTab('admin', 'Admin'),\n    },\n  ]\n\n  const resources: ResourceLink[] = [\n    {\n      id: 'clickhouse-docs',\n      title: 'ClickHouse Docs',\n      description: 'Official docs and SQL reference',\n      href: 'https://clickhouse.com/docs',\n    },\n    {\n      id: 'project-releases',\n      title: 'CH-UI Releases',\n      description: 'Binary downloads and release notes',\n      href: 'https://github.com/caioricciuti/ch-ui/releases',\n    },\n    {\n      id: 'project-issues',\n      title: 'Report Issue',\n      description: 'Open bug reports and feature requests',\n      href: 'https://github.com/caioricciuti/ch-ui/issues',\n    },\n  ]\n\n  function openTab(tab: Tab): void {\n    if (tab.type === 'query') {\n      openQueryTab(tab.sql)\n      return\n    }\n    if (tab.type === 'table') {\n      openQueryTab(`SELECT *\\nFROM \\`${tab.database}\\`.\\`${tab.table}\\`\\nLIMIT 1000`)\n      return\n    }\n    if (tab.type === 'database') {\n      openQueryTab(`SHOW TABLES FROM \\`${tab.database}\\``)\n      return\n    }\n    if (tab.type === 'dashboard') {\n      openDashboardTab(tab.dashboardId, tab.name)\n      return\n    }\n    if (tab.type === 'saved-queries') openSingletonTab('saved-queries', 'Saved Queries')\n    if (tab.type === 'dashboards') openSingletonTab('dashboards', 'Dashboards')\n    if (tab.type === 'schedules') openSingletonTab('schedules', 'Schedules')\n    if (tab.type === 'brain') openSingletonTab('brain', 'Brain')\n    if (tab.type === 'admin') openSingletonTab('admin', 'Admin')\n    if (tab.type === 'settings') openSingletonTab('settings', 'License')\n    if (tab.type === 'governance') openSingletonTab('governance', 
'Governance')\n    if (tab.type === 'pipelines') openSingletonTab('pipelines', 'Pipelines')\n  }\n\n  function recentSubtitle(tab: Tab): string {\n    switch (tab.type) {\n      case 'query':\n        return 'SQL query'\n      case 'table':\n        return `${tab.database}.${tab.table}`\n      case 'database':\n        return `${tab.database} database`\n      case 'dashboard':\n        return 'Dashboard'\n      case 'saved-queries':\n      case 'dashboards':\n      case 'schedules':\n      case 'brain':\n      case 'admin':\n      case 'settings':\n      case 'governance':\n      case 'pipelines':\n        return tab.name\n      default:\n        return 'Workspace item'\n    }\n  }\n\n  function recentPreview(tab: Tab): string {\n    if (tab.type === 'query') {\n      return tab.sql.trim().split('\\n')[0] || 'Empty query'\n    }\n    if (tab.type === 'table') return `Open table ${tab.database}.${tab.table}`\n    if (tab.type === 'database') return `Open database ${tab.database}`\n    if (tab.type === 'dashboard') return `Dashboard ${tab.dashboardId}`\n    return `Open ${tab.name}`\n  }\n\n  function recentIcon(tab: Tab): typeof SquareTerminal {\n    if (tab.type === 'query') return SquareTerminal\n    if (tab.type === 'table' || tab.type === 'database') return Table2\n    if (tab.type === 'dashboard' || tab.type === 'dashboards') return LayoutDashboard\n    if (tab.type === 'saved-queries') return Bookmark\n    if (tab.type === 'schedules') return Clock3\n    if (tab.type === 'brain') return Brain\n    if (tab.type === 'pipelines') return Workflow\n    if (tab.type === 'admin' || tab.type === 'governance') return Shield\n    return Home\n  }\n</script>\n\n<div class=\"h-full overflow-auto bg-gradient-to-b from-transparent via-gray-100/20 to-gray-100/35 dark:from-transparent dark:via-gray-900/20 dark:to-gray-900/35\">\n  <div class=\"mx-auto w-full max-w-6xl p-5 lg:p-7 space-y-5\">\n    <section class=\"surface-card rounded-2xl border border-gray-200/80 
dark:border-gray-800/80 p-5 lg:p-6\">\n      <div class=\"flex flex-col gap-3 md:flex-row md:items-center md:justify-between\">\n        <div class=\"space-y-1\">\n          <div class=\"inline-flex items-center gap-2 rounded-full border border-orange-300/45 dark:border-orange-700/45 bg-orange-100/60 dark:bg-orange-500/12 px-3 py-1 text-xs font-medium text-ch-orange\">\n            <Home size={12} />\n            Workspace Home\n          </div>\n          <h1 class=\"text-xl font-semibold text-gray-900 dark:text-gray-100\">\n            Welcome back{session?.user ? `, ${session.user}` : ''}\n          </h1>\n          <p class=\"text-sm text-gray-500 dark:text-gray-400\">Start a new query, jump into saved work, or open tools quickly.</p>\n        </div>\n        <button\n          class=\"inline-flex items-center justify-center gap-2 rounded-lg bg-ch-orange px-4 py-2 text-sm font-semibold text-white hover:bg-orange-500 transition-colors\"\n          onclick={() => openQueryTab()}\n        >\n          <SquareTerminal size={15} />\n          Run Query\n        </button>\n      </div>\n    </section>\n\n    <section class=\"grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-3.5\">\n      {#each quickLinks as item (item.id)}\n        <button\n          class=\"surface-card rounded-xl border border-gray-200/70 dark:border-gray-800/70 px-4 py-3.5 text-left hover:border-ch-orange/45 hover:bg-orange-50/35 dark:hover:bg-orange-500/8 transition-colors\"\n          onclick={item.run}\n        >\n          <div class=\"flex items-center gap-2.5\">\n            <div class=\"inline-flex h-8 w-8 items-center justify-center rounded-lg bg-gray-100 dark:bg-gray-900 text-ch-orange\">\n              <item.icon size={15} />\n            </div>\n            <div>\n              <p class=\"text-sm font-semibold text-gray-900 dark:text-gray-100\">{item.title}</p>\n              <p class=\"text-xs text-gray-500 dark:text-gray-400\">{item.description}</p>\n            </div>\n          
</div>\n        </button>\n      {/each}\n    </section>\n\n    <section class=\"surface-card rounded-2xl border border-gray-200/80 dark:border-gray-800/80 p-4\">\n      <div class=\"flex items-center gap-2 mb-3\">\n        <BookOpen size={14} class=\"text-ch-orange\" />\n        <h2 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100\">Resources</h2>\n      </div>\n      <div class=\"grid grid-cols-1 md:grid-cols-3 gap-3\">\n        {#each resources as resource (resource.id)}\n          <a\n            class=\"rounded-xl border border-gray-200/70 dark:border-gray-800/70 px-3.5 py-3 hover:border-ch-blue/45 hover:bg-ch-blue/5 transition-colors\"\n            href={resource.href}\n            target=\"_blank\"\n            rel=\"noopener noreferrer\"\n          >\n            <p class=\"flex items-center gap-1.5 text-sm font-semibold text-gray-900 dark:text-gray-100\">\n              {resource.title}\n              <ArrowUpRight size={13} class=\"text-gray-400\" />\n            </p>\n            <p class=\"mt-1 text-xs text-gray-500 dark:text-gray-400\">{resource.description}</p>\n          </a>\n        {/each}\n      </div>\n    </section>\n\n    <section class=\"space-y-3\">\n      <div class=\"flex items-center justify-between\">\n        <h2 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100\">Recently Opened</h2>\n      </div>\n      {#if recentTabs.length === 0}\n        <div class=\"surface-card rounded-xl border border-dashed border-gray-300 dark:border-gray-700 px-4 py-8 text-center\">\n          <p class=\"text-sm text-gray-500 dark:text-gray-400\">No recent workspace items yet.</p>\n          <button\n            class=\"mt-3 inline-flex items-center gap-2 rounded-lg border border-gray-300 dark:border-gray-700 px-3 py-1.5 text-xs font-semibold text-gray-700 dark:text-gray-200 hover:border-ch-orange hover:text-ch-orange transition-colors\"\n            onclick={() => openQueryTab()}\n          >\n            <SquareTerminal size={13} 
/>\n            Create first query\n          </button>\n        </div>\n      {:else}\n        <div class=\"grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-3\">\n          {#each recentTabs as tab (tab.id)}\n            {@const Icon = recentIcon(tab)}\n            <button\n              class=\"surface-card rounded-xl border border-gray-200/75 dark:border-gray-800/75 px-4 py-3 text-left hover:border-ch-orange/45 hover:bg-orange-50/30 dark:hover:bg-orange-500/8 transition-colors\"\n              onclick={() => openTab(tab)}\n            >\n              <div class=\"flex items-center gap-2 text-xs text-gray-500 dark:text-gray-400\">\n                <Icon size={13} />\n                <span>{recentSubtitle(tab)}</span>\n              </div>\n              <p class=\"mt-2 text-sm font-semibold text-gray-900 dark:text-gray-100 truncate\">{tab.name}</p>\n              <p class=\"mt-1 text-xs text-gray-500 dark:text-gray-400 truncate\">{recentPreview(tab)}</p>\n            </button>\n          {/each}\n        </div>\n      {/if}\n    </section>\n  </div>\n</div>\n"
  },
  {
    "path": "ui/src/pages/Login.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from \"svelte\";\n  import { listConnections } from \"../lib/api/auth\";\n  import { login, getError } from \"../lib/stores/session.svelte\";\n  import type { Connection } from \"../lib/types/api\";\n  import Button from \"../lib/components/common/Button.svelte\";\n  import Combobox from \"../lib/components/common/Combobox.svelte\";\n  import Spinner from \"../lib/components/common/Spinner.svelte\";\n  import Sheet from \"../lib/components/common/Sheet.svelte\";\n  import {\n    Wifi,\n    WifiOff,\n    Database,\n    ShieldCheck,\n    Terminal,\n    Layers,\n    Zap,\n    ArrowRight,\n    AlertTriangle,\n    BookOpen,\n    ExternalLink,\n  } from \"lucide-svelte\";\n  import logo from \"../assets/logo.png\";\n\n  let connections = $state<Connection[]>([]);\n  let loadingConnections = $state(true);\n  let selectedId = $state(\"\");\n  let username = $state(\"\");\n  let password = $state(\"\");\n  let submitting = $state(false);\n  let localError = $state<string | null>(null);\n  let showSetupSheet = $state(false);\n  let setupClickHouseURL = $state(\"http://localhost:8123\");\n  let setupConnectionName = $state(\"Local ClickHouse\");\n\n  type LoginErrorKind = \"auth\" | \"connection\" | \"rateLimit\" | \"generic\";\n\n  type LoginHelp = {\n    title: string;\n    detail?: string;\n  };\n\n  function classifyLoginError(message: string | null): LoginErrorKind | null {\n    if (!message) return null;\n    const normalized = message.toLowerCase();\n    if (normalized.includes(\"invalid credentials\") || normalized.includes(\"authentication failed\")) {\n      return \"auth\";\n    }\n    if (normalized.includes(\"connection offline\") || normalized.includes(\"unreachable\") || normalized.includes(\"tunnel\")) {\n      return \"connection\";\n    }\n    if (normalized.includes(\"too many login attempts\") || normalized.includes(\"retry in\")) {\n      return \"rateLimit\";\n    }\n    return \"generic\";\n  
}\n\n  function buildLoginHelp(kind: LoginErrorKind | null): LoginHelp | null {\n    if (!kind) return null;\n    if (kind === \"auth\") {\n      return {\n        title: \"Authentication failed\",\n        detail: \"Verify username/password and selected ClickHouse connection.\",\n      };\n    }\n    if (kind === \"connection\") {\n      return {\n        title: \"Connection unavailable\",\n        detail: \"Start the connector/agent for this connection, then retry.\",\n      };\n    }\n    if (kind === \"rateLimit\") {\n      return {\n        title: \"Login temporarily blocked\",\n        detail: \"Wait for the retry window and try again.\",\n      };\n    }\n    return { title: \"Login failed\" };\n  }\n\n  onMount(async () => {\n    try {\n      connections = await listConnections();\n      if (connections.length === 1) {\n        selectedId = connections[0].id;\n      }\n    } catch (e: any) {\n      localError = e.message || \"Failed to load connections\";\n    } finally {\n      loadingConnections = false;\n    }\n  });\n\n  async function handleSubmit(e: SubmitEvent) {\n    e.preventDefault();\n    if (!selectedId || !username) {\n      localError = \"Connection and username are required\";\n      return;\n    }\n    const selected = connections.find((c) => c.id === selectedId);\n    if (selected && !selected.online) {\n      localError = `Connection \"${selected.name}\" is offline. 
Bring it online and retry.`;\n      return;\n    }\n\n    localError = null;\n    submitting = true;\n    try {\n      await login(selectedId, username, password);\n    } catch (e: any) {\n      localError = e.message || \"Login failed\";\n    } finally {\n      submitting = false;\n    }\n  }\n\n  function shellQuote(value: string): string {\n    return `'${value.replace(/'/g, `'\\\"'\\\"'`)}'`;\n  }\n\n  const error = $derived(localError || getError());\n  const selectedConnection = $derived(connections.find((c) => c.id === selectedId) || null);\n  const canSubmit = $derived(Boolean(selectedId && username && (selectedConnection ? selectedConnection.online : false)));\n  const errorKind = $derived(classifyLoginError(error));\n  const loginHelp = $derived(buildLoginHelp(errorKind));\n  const showSetupRecoveryCTA = $derived(errorKind === \"connection\" || errorKind === \"rateLimit\");\n  const quickHelpURL = \"https://github.com/caioricciuti/ch-ui#cant-login\";\n  const cantLoginDocsURL = \"https://github.com/caioricciuti/ch-ui/blob/main/docs/cant-login.md\";\n  const dockerDocsURL = \"https://github.com/caioricciuti/ch-ui#quick-start-docker\";\n  const normalizedSetupURL = $derived(setupClickHouseURL.trim() || \"http://localhost:8123\");\n  const normalizedSetupConnectionName = $derived(setupConnectionName.trim() || \"Local ClickHouse\");\n  const localCommand = $derived(\n    `ch-ui server --clickhouse-url ${shellQuote(normalizedSetupURL)} --connection-name ${shellQuote(normalizedSetupConnectionName)}`\n  );\n  const localCommandWithBinary = $derived(\n    `./ch-ui server --clickhouse-url ${shellQuote(normalizedSetupURL)} --connection-name ${shellQuote(normalizedSetupConnectionName)}`\n  );\n  const dockerCommand = $derived(\n    `docker run --rm -p 3488:3488 -v ch-ui-data:/app/data -e CLICKHOUSE_URL=${shellQuote(normalizedSetupURL)} -e CONNECTION_NAME=${shellQuote(normalizedSetupConnectionName)} ghcr.io/caioricciuti/ch-ui:latest`\n  );\n</script>\n\n<div 
class=\"login-root\">\n  <!-- Left Panel — Branding -->\n  <div class=\"left-panel\">\n    <div class=\"left-panel-noise\"></div>\n    <div class=\"left-panel-grid\"></div>\n\n    <div class=\"left-content\">\n      <div class=\"logo-block\">\n        <img src={logo} alt=\"CH-UI\" class=\"logo-img\" />\n        <div class=\"logo-text\">\n          <span class=\"logo-name\">CH-UI</span>\n          <span class=\"logo-version\">v2.0</span>\n        </div>\n      </div>\n\n      <h2 class=\"hero-title\">\n        Your ClickHouse<br />\n        <span class=\"hero-accent\">command center.</span>\n      </h2>\n\n      <p class=\"hero-sub\">\n        Query, explore, and manage your ClickHouse clusters with a modern\n        workspace built for speed.\n      </p>\n\n      <div class=\"features\">\n        <div class=\"feature-item\">\n          <div class=\"feature-icon\">\n            <Terminal size={16} />\n          </div>\n          <div>\n            <span class=\"feature-label\">SQL Editor</span>\n            <span class=\"feature-desc\"\n              >Multi-tab query workspace with autocomplete</span\n            >\n          </div>\n        </div>\n        <div class=\"feature-item\">\n          <div class=\"feature-icon\">\n            <Layers size={16} />\n          </div>\n          <div>\n            <span class=\"feature-label\">Schema Explorer</span>\n            <span class=\"feature-desc\"\n              >Browse databases, tables, and columns</span\n            >\n          </div>\n        </div>\n        <div class=\"feature-item\">\n          <div class=\"feature-icon\">\n            <Zap size={16} />\n          </div>\n          <div>\n            <span class=\"feature-label\">Performance</span>\n            <span class=\"feature-desc\"\n              >Real-time metrics and query profiling</span\n            >\n          </div>\n        </div>\n      </div>\n\n      <div class=\"left-footer\">\n        <span class=\"left-footer-text\">Open 
Source</span>\n        <span class=\"left-footer-dot\"></span>\n        <span class=\"left-footer-text\">Self-Hosted</span>\n        <span class=\"left-footer-dot\"></span>\n        <span class=\"left-footer-text\">Apache License 2.0</span>\n      </div>\n    </div>\n  </div>\n\n  <!-- Right Panel — Login Form -->\n  <div class=\"right-panel\">\n    <div class=\"right-content\">\n      <div class=\"form-header\">\n        <div class=\"form-title-row\">\n          <h1 class=\"form-title\">Sign in</h1>\n          <button\n            type=\"button\"\n            class=\"cant-login-link\"\n            onclick={() => (showSetupSheet = true)}\n          >\n            <BookOpen size={13} />\n            Can't login?\n          </button>\n        </div>\n        <p class=\"form-subtitle\">Connect to your ClickHouse instance</p>\n      </div>\n\n      <div class=\"secure-badge\">\n        <ShieldCheck size={12} />\n        <span>Credentials are sent directly to your server</span>\n      </div>\n\n      {#if loadingConnections}\n        <div class=\"loading-state\">\n          <Spinner />\n          <span class=\"loading-text\">Discovering connections...</span>\n        </div>\n      {:else if connections.length === 0}\n        <div class=\"empty-state\">\n          <Database size={28} class=\"empty-icon\" />\n          <p class=\"empty-title\">No connections configured</p>\n          <p class=\"empty-desc\">\n            No local connection is ready yet. Open setup and restart CH-UI with\n            the correct URL.\n          </p>\n          <button\n            type=\"button\"\n            class=\"empty-setup-btn\"\n            onclick={() => (showSetupSheet = true)}\n          >\n            <BookOpen size={14} />\n            Can't login? 
Open setup guide\n          </button>\n        </div>\n      {:else}\n        <form onsubmit={handleSubmit} class=\"login-form\">\n          <!-- Connection -->\n          <div class=\"field\">\n            <label class=\"field-label\" for=\"connection\">\n              <Database size={12} class=\"field-label-icon\" />\n              Connection\n            </label>\n            <Combobox\n              options={connections.map((conn) => ({\n                value: conn.id,\n                label: conn.name,\n                hint: conn.online ? \"Online\" : \"Offline\",\n                keywords: `${conn.name} ${conn.id}`,\n              }))}\n              value={selectedId}\n              placeholder=\"Select a connection...\"\n              onChange={(id) => (selectedId = id)}\n            />\n            {#if selectedId}\n              {@const selected = connections.find((c) => c.id === selectedId)}\n              {#if selected}\n                <div class=\"conn-status\">\n                  {#if selected.online}\n                    <Wifi size={11} class=\"status-online\" />\n                    <span class=\"status-text-online\">Connected</span>\n                  {:else}\n                    <WifiOff size={11} class=\"status-offline\" />\n                    <span class=\"status-text-offline\">Unreachable</span>\n                  {/if}\n                </div>\n              {/if}\n            {/if}\n          </div>\n\n          <!-- Username -->\n          <div class=\"field\">\n            <label class=\"field-label\" for=\"username\">Username</label>\n            <input\n              id=\"username\"\n              type=\"text\"\n              bind:value={username}\n              placeholder=\"default\"\n              autocomplete=\"username\"\n              class=\"field-input\"\n            />\n          </div>\n\n          <!-- Password -->\n          <div class=\"field\">\n            <label class=\"field-label\" for=\"password\">Password</label>\n    
        <input\n              id=\"password\"\n              type=\"password\"\n              bind:value={password}\n              placeholder=\"Optional\"\n              autocomplete=\"current-password\"\n              class=\"field-input\"\n            />\n          </div>\n\n          {#if error}\n            <div class=\"error-block\">\n              <div class=\"error-header\">\n                <AlertTriangle size={14} />\n                <p class=\"error-title\">{loginHelp?.title ?? \"Login failed\"}</p>\n              </div>\n              <p class=\"error-text\">{error}</p>\n              {#if loginHelp?.detail}\n                <p class=\"error-help\">{loginHelp.detail}</p>\n              {/if}\n              {#if showSetupRecoveryCTA}\n                <button\n                  type=\"button\"\n                  class=\"error-setup-btn\"\n                  onclick={() => (showSetupSheet = true)}\n                >\n                  <BookOpen size={13} />\n                  Can't login? Open setup guide\n                </button>\n                {#if errorKind === \"rateLimit\"}\n                  <p class=\"error-recovery-note\">\n                    If retries came from the wrong local URL, update setup and\n                    restart CH-UI before trying again.\n                  </p>\n                {/if}\n              {/if}\n            </div>\n          {/if}\n\n          <Button\n            type=\"submit\"\n            loading={submitting}\n            disabled={!canSubmit}\n          >\n            <span class=\"btn-inner\">\n              Connect\n              <ArrowRight size={14} />\n            </span>\n          </Button>\n        </form>\n      {/if}\n\n      <Sheet\n        open={showSetupSheet}\n        title=\"Can't Login? 
Setup Guide\"\n        size=\"lg\"\n        onclose={() => (showSetupSheet = false)}\n      >\n        <div class=\"setup-sheet\">\n          <p class=\"setup-sheet-intro\">\n            Set URL/name, run one command, restart CH-UI, then return to Sign in.\n          </p>\n          <div class=\"setup-sheet-inputs\">\n            <div class=\"field\">\n              <label class=\"field-label\" for=\"sheet-clickhouse-url\">\n                ClickHouse URL\n              </label>\n              <input\n                id=\"sheet-clickhouse-url\"\n                type=\"url\"\n                bind:value={setupClickHouseURL}\n                placeholder=\"http://localhost:8123\"\n                class=\"field-input\"\n              />\n            </div>\n            <div class=\"field\">\n              <label class=\"field-label\" for=\"sheet-connection-name\">\n                Connection Name\n              </label>\n              <input\n                id=\"sheet-connection-name\"\n                type=\"text\"\n                bind:value={setupConnectionName}\n                placeholder=\"Local ClickHouse\"\n                class=\"field-input\"\n              />\n            </div>\n          </div>\n          <ol class=\"setup-sheet-steps\">\n            <li>Stop any running <code>ch-ui server</code> process.</li>\n            <li>Run one setup command with your URL and connection name.</li>\n            <li>\n              Open <code>http://localhost:3488</code> and sign in with your ClickHouse\n              credentials.\n            </li>\n          </ol>\n\n          <div class=\"setup-command-block\">\n            <p class=\"setup-command-title\">Run with globally installed `ch-ui`</p>\n            <pre>{localCommand}</pre>\n          </div>\n\n          <div class=\"setup-command-block\">\n            <p class=\"setup-command-title\">Run with local binary</p>\n            <pre>{localCommandWithBinary}</pre>\n          </div>\n\n          <div 
class=\"setup-command-block\">\n            <p class=\"setup-command-title\">Run with Docker</p>\n            <pre>{dockerCommand}</pre>\n          </div>\n\n          <div class=\"setup-sheet-links\">\n            <a\n              href={quickHelpURL}\n              target=\"_blank\"\n              rel=\"noopener\"\n              class=\"setup-doc-link\"\n            >\n              Can't login? Quick path\n              <ExternalLink size={12} />\n            </a>\n            <a\n              href={cantLoginDocsURL}\n              target=\"_blank\"\n              rel=\"noopener\"\n              class=\"setup-doc-link\"\n            >\n              Full Can't login doc\n              <ExternalLink size={12} />\n            </a>\n            <a\n              href={dockerDocsURL}\n              target=\"_blank\"\n              rel=\"noopener\"\n              class=\"setup-doc-link\"\n            >\n              Docker Quick Start\n              <ExternalLink size={12} />\n            </a>\n          </div>\n\n          <p class=\"setup-security-note\">\n            Setup never stores ClickHouse credentials and commands never include\n            passwords.\n          </p>\n        </div>\n      </Sheet>\n\n      <div class=\"right-footer\">\n        <a\n          href=\"https://github.com/caioricciuti/ch-ui\"\n          target=\"_blank\"\n          rel=\"noopener\"\n          class=\"footer-link\"\n        >\n          GitHub\n        </a>\n        <span class=\"footer-sep\">/</span>\n        <a\n          href={cantLoginDocsURL}\n          target=\"_blank\"\n          rel=\"noopener\"\n          class=\"footer-link\"\n        >\n          Docs\n        </a>\n      </div>\n    </div>\n  </div>\n</div>\n\n<style>\n  /* ── Root layout ── */\n  .login-root {\n    display: flex;\n    min-height: 100vh;\n    min-height: 100dvh;\n    font-family:\n      \"DM Sans\",\n      \"SF Pro Display\",\n      -apple-system,\n      system-ui,\n      sans-serif;\n  }\n\n  /* ── 
Left panel ── */\n  .left-panel {\n    position: relative;\n    flex: 1 1 50%;\n    display: none;\n    background: linear-gradient(145deg, #0c1220 0%, #0f1a2e 40%, #132240 100%);\n    overflow: hidden;\n  }\n\n  @media (min-width: 960px) {\n    .left-panel {\n      display: flex;\n      align-items: center;\n      justify-content: center;\n    }\n  }\n\n  .left-panel-noise {\n    position: absolute;\n    inset: 0;\n    background-image: url(\"data:image/svg+xml,%3Csvg viewBox='0 0 256 256' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='n'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.85' numOctaves='4' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23n)' opacity='0.03'/%3E%3C/svg%3E\");\n    background-size: 200px 200px;\n    pointer-events: none;\n    z-index: 1;\n  }\n\n  .left-panel-grid {\n    position: absolute;\n    inset: 0;\n    background-image: linear-gradient(\n        rgba(255, 255, 255, 0.015) 1px,\n        transparent 1px\n      ),\n      linear-gradient(90deg, rgba(255, 255, 255, 0.015) 1px, transparent 1px);\n    background-size: 48px 48px;\n    pointer-events: none;\n    z-index: 1;\n  }\n\n  .left-content {\n    position: relative;\n    z-index: 2;\n    max-width: 420px;\n    padding: 3rem 2.5rem;\n  }\n\n  /* Logo */\n  .logo-block {\n    display: flex;\n    align-items: center;\n    gap: 0.75rem;\n    margin-bottom: 2.5rem;\n  }\n\n  .logo-img {\n    width: 44px;\n    height: 44px;\n    border-radius: 12px;\n    border: 1px solid rgba(255, 255, 255, 0.08);\n  }\n\n  .logo-text {\n    display: flex;\n    align-items: baseline;\n    gap: 0.5rem;\n  }\n\n  .logo-name {\n    font-size: 1.25rem;\n    font-weight: 700;\n    color: #f0f4f8;\n    letter-spacing: -0.02em;\n  }\n\n  .logo-version {\n    font-size: 0.65rem;\n    font-weight: 500;\n    color: rgba(255, 255, 255, 0.3);\n    background: rgba(255, 255, 255, 0.05);\n    padding: 0.15rem 0.4rem;\n    border-radius: 4px;\n    letter-spacing: 
0.04em;\n  }\n\n  /* Hero text */\n  .hero-title {\n    font-size: 2rem;\n    font-weight: 700;\n    line-height: 1.2;\n    color: #e2e8f0;\n    letter-spacing: -0.03em;\n    margin-bottom: 1rem;\n  }\n\n  .hero-accent {\n    background: linear-gradient(135deg, #facc15 0%, #f59e0b 100%);\n    -webkit-background-clip: text;\n    -webkit-text-fill-color: transparent;\n    background-clip: text;\n  }\n\n  .hero-sub {\n    font-size: 0.875rem;\n    line-height: 1.6;\n    color: rgba(226, 232, 240, 0.5);\n    margin-bottom: 2.5rem;\n    max-width: 340px;\n  }\n\n  /* Features list */\n  .features {\n    display: flex;\n    flex-direction: column;\n    gap: 1.25rem;\n    margin-bottom: 3rem;\n  }\n\n  .feature-item {\n    display: flex;\n    align-items: flex-start;\n    gap: 0.75rem;\n  }\n\n  .feature-icon {\n    flex-shrink: 0;\n    width: 32px;\n    height: 32px;\n    display: flex;\n    align-items: center;\n    justify-content: center;\n    background: rgba(255, 255, 255, 0.04);\n    border: 1px solid rgba(255, 255, 255, 0.06);\n    border-radius: 8px;\n    color: #facc15;\n  }\n\n  .feature-label {\n    display: block;\n    font-size: 0.8rem;\n    font-weight: 600;\n    color: #e2e8f0;\n    letter-spacing: -0.01em;\n    margin-bottom: 0.1rem;\n  }\n\n  .feature-desc {\n    display: block;\n    font-size: 0.72rem;\n    color: rgba(226, 232, 240, 0.4);\n    line-height: 1.4;\n  }\n\n  /* Left footer */\n  .left-footer {\n    display: flex;\n    align-items: center;\n    gap: 0.6rem;\n  }\n\n  .left-footer-text {\n    font-size: 0.68rem;\n    color: rgba(226, 232, 240, 0.3);\n    letter-spacing: 0.03em;\n    text-transform: uppercase;\n    font-weight: 500;\n  }\n\n  .left-footer-dot {\n    width: 3px;\n    height: 3px;\n    border-radius: 50%;\n    background: rgba(226, 232, 240, 0.15);\n  }\n\n  /* ── Right panel ── */\n  .right-panel {\n    flex: 1 1 50%;\n    display: flex;\n    align-items: center;\n    justify-content: center;\n    background: #fafbfc;\n    
padding: 2rem 1.5rem;\n  }\n\n  :global(.dark) .right-panel {\n    background: #0d1117;\n  }\n\n  .right-content {\n    width: 100%;\n    max-width: 380px;\n  }\n\n  /* Form header */\n  .form-header {\n    margin-bottom: 1.75rem;\n  }\n\n  .form-title {\n    font-size: 1.5rem;\n    font-weight: 700;\n    color: #111827;\n    letter-spacing: -0.03em;\n    margin-bottom: 0.35rem;\n  }\n\n  :global(.dark) .form-title {\n    color: #f0f4f8;\n  }\n\n  .form-subtitle {\n    font-size: 0.8rem;\n    color: #6b7280;\n  }\n\n  :global(.dark) .form-subtitle {\n    color: #6b7280;\n  }\n\n  /* Secure badge */\n  .secure-badge {\n    display: inline-flex;\n    align-items: center;\n    gap: 0.35rem;\n    font-size: 0.68rem;\n    font-weight: 500;\n    color: #16a34a;\n    background: rgba(22, 163, 74, 0.06);\n    border: 1px solid rgba(22, 163, 74, 0.12);\n    border-radius: 6px;\n    padding: 0.3rem 0.6rem;\n    margin-bottom: 1.5rem;\n  }\n\n  :global(.dark) .secure-badge {\n    color: #4ade80;\n    background: rgba(74, 222, 128, 0.06);\n    border-color: rgba(74, 222, 128, 0.1);\n  }\n\n  .form-title-row {\n    display: flex;\n    align-items: center;\n    justify-content: space-between;\n    gap: 0.75rem;\n  }\n\n  .cant-login-link {\n    display: inline-flex;\n    align-items: center;\n    gap: 0.4rem;\n    border: 1px solid #f59e0b;\n    background: rgba(254, 243, 199, 0.7);\n    color: #9a3412;\n    border-radius: 9px;\n    font-size: 0.75rem;\n    font-weight: 700;\n    padding: 0.5rem 0.7rem;\n    cursor: pointer;\n    transition: all 0.15s;\n  }\n\n  .cant-login-link:hover {\n    background: rgba(254, 243, 199, 1);\n  }\n\n  :global(.dark) .cant-login-link {\n    border-color: rgba(245, 158, 11, 0.5);\n    background: rgba(245, 158, 11, 0.16);\n    color: #fbbf24;\n  }\n\n  .empty-setup-btn,\n  .error-setup-btn {\n    display: inline-flex;\n    align-items: center;\n    justify-content: center;\n    gap: 0.35rem;\n    border: 1px solid #f59e0b;\n    background: 
rgba(254, 243, 199, 0.65);\n    color: #9a3412;\n    border-radius: 9px;\n    font-size: 0.72rem;\n    font-weight: 700;\n    padding: 0.45rem 0.65rem;\n    cursor: pointer;\n    transition: all 0.15s;\n  }\n\n  .empty-setup-btn {\n    margin-top: 0.8rem;\n  }\n\n  .error-setup-btn {\n    margin-top: 0.5rem;\n  }\n\n  .empty-setup-btn:hover,\n  .error-setup-btn:hover {\n    background: rgba(254, 243, 199, 1);\n  }\n\n  :global(.dark) .empty-setup-btn,\n  :global(.dark) .error-setup-btn {\n    border-color: rgba(245, 158, 11, 0.55);\n    background: rgba(245, 158, 11, 0.16);\n    color: #fbbf24;\n  }\n\n  .setup-doc-link {\n    display: inline-flex;\n    align-items: center;\n    gap: 0.35rem;\n    font-size: 0.72rem;\n    color: #1d4ed8;\n    text-decoration: none;\n    font-weight: 600;\n  }\n\n  .setup-doc-link:hover {\n    color: #1e40af;\n  }\n\n  :global(.dark) .setup-doc-link {\n    color: #60a5fa;\n  }\n\n  :global(.dark) .setup-doc-link:hover {\n    color: #93c5fd;\n  }\n\n  .setup-sheet {\n    display: flex;\n    flex-direction: column;\n    gap: 0.9rem;\n  }\n\n  .setup-sheet-inputs {\n    display: grid;\n    gap: 0.85rem;\n  }\n\n  .setup-sheet code {\n    font-family: \"JetBrains Mono\", \"SFMono-Regular\", Menlo, monospace;\n    font-size: 0.72rem;\n    background: rgba(15, 23, 42, 0.06);\n    border: 1px solid rgba(15, 23, 42, 0.1);\n    border-radius: 6px;\n    padding: 0.08rem 0.3rem;\n  }\n\n  :global(.dark) .setup-sheet code {\n    background: rgba(148, 163, 184, 0.15);\n    border-color: rgba(148, 163, 184, 0.25);\n  }\n\n  .setup-sheet-intro {\n    margin: 0;\n    font-size: 0.8rem;\n    color: #374151;\n    line-height: 1.45;\n  }\n\n  :global(.dark) .setup-sheet-intro {\n    color: #d1d5db;\n  }\n\n  .setup-sheet-steps {\n    margin: 0;\n    padding-left: 1.1rem;\n    font-size: 0.77rem;\n    line-height: 1.45;\n    color: #4b5563;\n  }\n\n  .setup-sheet-steps li + li {\n    margin-top: 0.2rem;\n  }\n\n  :global(.dark) .setup-sheet-steps {\n   
 color: #9ca3af;\n  }\n\n  .setup-sheet-links {\n    display: flex;\n    gap: 0.9rem;\n    flex-wrap: wrap;\n  }\n\n  .setup-security-note {\n    margin: 0;\n    font-size: 0.72rem;\n    line-height: 1.4;\n    color: #9a3412;\n  }\n\n  :global(.dark) .setup-security-note {\n    color: #fbbf24;\n  }\n\n  .setup-command-block {\n    border: 1px solid #e5e7eb;\n    background: #ffffff;\n    border-radius: 10px;\n    padding: 0.6rem 0.7rem;\n  }\n\n  :global(.dark) .setup-command-block {\n    border-color: #2d3748;\n    background: rgba(255, 255, 255, 0.02);\n  }\n\n  .setup-command-title {\n    margin: 0 0 0.4rem;\n    font-size: 0.72rem;\n    text-transform: uppercase;\n    letter-spacing: 0.03em;\n    color: #6b7280;\n    font-weight: 700;\n  }\n\n  .setup-command-block pre {\n    margin: 0;\n    white-space: pre-wrap;\n    word-break: break-word;\n    font-size: 0.75rem;\n    line-height: 1.45;\n    color: #111827;\n    font-family: \"JetBrains Mono\", \"SFMono-Regular\", Menlo, monospace;\n  }\n\n  :global(.dark) .setup-command-block pre {\n    color: #e5e7eb;\n  }\n\n  /* Loading state */\n  .loading-state {\n    display: flex;\n    flex-direction: column;\n    align-items: center;\n    gap: 0.75rem;\n    padding: 3rem 0;\n  }\n\n  .loading-text {\n    font-size: 0.75rem;\n    color: #9ca3af;\n  }\n\n  /* Empty state */\n  .empty-state {\n    text-align: center;\n    padding: 2.5rem 1rem;\n  }\n\n  .empty-state :global(.empty-icon) {\n    color: #d1d5db;\n    margin: 0 auto 0.75rem;\n  }\n\n  :global(.dark) .empty-state :global(.empty-icon) {\n    color: #4b5563;\n  }\n\n  .empty-title {\n    font-size: 0.85rem;\n    font-weight: 600;\n    color: #374151;\n    margin-bottom: 0.3rem;\n  }\n\n  :global(.dark) .empty-title {\n    color: #d1d5db;\n  }\n\n  .empty-desc {\n    font-size: 0.75rem;\n    color: #9ca3af;\n    max-width: 280px;\n    margin: 0 auto;\n    line-height: 1.5;\n  }\n\n  /* Form */\n  .login-form {\n    display: flex;\n    flex-direction: 
column;\n    gap: 1.25rem;\n  }\n\n  .field {\n    display: flex;\n    flex-direction: column;\n  }\n\n  .field-label {\n    display: flex;\n    align-items: center;\n    gap: 0.35rem;\n    font-size: 0.72rem;\n    font-weight: 600;\n    color: #6b7280;\n    margin-bottom: 0.4rem;\n    text-transform: uppercase;\n    letter-spacing: 0.04em;\n  }\n\n  :global(.dark) .field-label {\n    color: #6b7280;\n  }\n\n  .field-input {\n    width: 100%;\n    background: white;\n    border: 1px solid #e5e7eb;\n    border-radius: 10px;\n    padding: 0.6rem 0.85rem;\n    font-size: 0.85rem;\n    color: #111827;\n    transition:\n      border-color 0.15s,\n      box-shadow 0.15s;\n    outline: none;\n    font-family: inherit;\n  }\n\n  .field-input::placeholder {\n    color: #c9cdd4;\n  }\n\n  .field-input:focus {\n    border-color: #3b82f6;\n    box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.1);\n  }\n\n  :global(.dark) .field-input {\n    background: rgba(255, 255, 255, 0.03);\n    border-color: #2d3748;\n    color: #e2e8f0;\n  }\n\n  :global(.dark) .field-input::placeholder {\n    color: #4a5568;\n  }\n\n  :global(.dark) .field-input:focus {\n    border-color: #3b82f6;\n    box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.15);\n  }\n\n  /* Connection status */\n  .conn-status {\n    display: flex;\n    align-items: center;\n    gap: 0.3rem;\n    margin-top: 0.4rem;\n  }\n\n  .status-text-online {\n    color: #16a34a;\n  }\n\n  :global(.dark) .status-text-online {\n    color: #4ade80;\n  }\n\n  .status-text-offline {\n    color: #d97706;\n  }\n\n  :global(.dark) .status-text-offline {\n    color: #fbbf24;\n  }\n\n  .status-text-online,\n  .status-text-offline {\n    font-size: 0.7rem;\n    font-weight: 500;\n  }\n\n  /* Error */\n  .error-block {\n    background: rgba(239, 68, 68, 0.06);\n    border: 1px solid rgba(239, 68, 68, 0.15);\n    border-radius: 8px;\n    padding: 0.5rem 0.75rem;\n  }\n\n  .error-header {\n    display: flex;\n    align-items: center;\n    gap: 0.35rem;\n    
margin-bottom: 0.3rem;\n    color: #dc2626;\n  }\n\n  .error-title {\n    font-size: 0.75rem;\n    font-weight: 700;\n    color: inherit;\n  }\n\n  .error-text {\n    font-size: 0.78rem;\n    color: #dc2626;\n  }\n\n  :global(.dark) .error-text {\n    color: #f87171;\n  }\n\n  .error-help {\n    margin-top: 0.35rem;\n    font-size: 0.72rem;\n    color: #b45309;\n  }\n\n  :global(.dark) .error-help {\n    color: #fbbf24;\n  }\n\n  .error-recovery-note {\n    margin-top: 0.35rem;\n    font-size: 0.7rem;\n    line-height: 1.4;\n    color: #9a3412;\n  }\n\n  :global(.dark) .error-recovery-note {\n    color: #fbbf24;\n  }\n\n  /* Button inner */\n  .btn-inner {\n    display: flex;\n    align-items: center;\n    justify-content: center;\n    gap: 0.4rem;\n  }\n\n  /* Right footer */\n  .right-footer {\n    display: flex;\n    align-items: center;\n    justify-content: center;\n    gap: 0.5rem;\n    margin-top: 2rem;\n    padding-top: 1.25rem;\n    border-top: 1px solid #f0f0f0;\n  }\n\n  :global(.dark) .right-footer {\n    border-top-color: rgba(255, 255, 255, 0.05);\n  }\n\n  .footer-link {\n    font-size: 0.72rem;\n    color: #9ca3af;\n    text-decoration: none;\n    transition: color 0.15s;\n  }\n\n  .footer-link:hover {\n    color: #3b82f6;\n  }\n\n  .footer-sep {\n    font-size: 0.65rem;\n    color: #d1d5db;\n  }\n\n  :global(.dark) .footer-sep {\n    color: #374151;\n  }\n</style>\n"
  },
  {
    "path": "ui/src/pages/Models.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { Model, ModelRun, ModelRunResult, ModelSchedule, DAGNode, DAGEdge, Pipeline } from '../lib/types/models'\n  import * as api from '../lib/api/models'\n  import { refreshModelCache } from '../lib/editor/completions'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import { openModelTab } from '../lib/stores/tabs.svelte'\n  import ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte'\n  import ContextMenu from '../lib/components/common/ContextMenu.svelte'\n  import type { ContextMenuItem } from '../lib/components/common/ContextMenu.svelte'\n  import {\n    SvelteFlow,\n    Controls,\n    Background,\n    type Node,\n    type Edge,\n    type NodeTypes,\n  } from '@xyflow/svelte'\n  import '@xyflow/svelte/dist/style.css'\n  import ModelNode from '../lib/components/models/ModelNode.svelte'\n  import { getTheme } from '../lib/stores/theme.svelte'\n  import {\n    Boxes,\n    Plus,\n    Play,\n    Trash2,\n    RefreshCw,\n    Eye,\n    MoreHorizontal,\n    Table2,\n    GitBranch,\n    History,\n    Save,\n    CheckCircle,\n    XCircle,\n    SkipForward,\n    Clock,\n    ChevronDown,\n    ChevronRight,\n    Timer,\n    X,\n    Info,\n  } from 'lucide-svelte'\n\n  // ── State ──────────────────────────────────────────────────────────\n\n  let models = $state<Model[]>([])\n  let loading = $state(true)\n\n  // DAG overlay\n  let showDAG = $state(false)\n  let dagNodes = $state<Node[]>([])\n  let dagEdges = $state<Edge[]>([])\n\n  // DAG edges for dependency pills (raw from API)\n  let dagEdgesRaw = $state<DAGEdge[]>([])\n\n  // model_id → list of upstream model names (what it depends on)\n  let upstreamMap = $derived.by(() => {\n    const map = new Map<string, string[]>()\n    const idToName = new Map(models.map(m => [m.id, m.name]))\n    for (const e of dagEdgesRaw) {\n      const name = idToName.get(e.source)\n      if (!name) 
continue\n      const list = map.get(e.target) ?? []\n      list.push(name)\n      map.set(e.target, list)\n    }\n    return map\n  })\n\n  // History overlay\n  let showHistory = $state(false)\n  let runs = $state<ModelRun[]>([])\n  let expandedRunId = $state<string | null>(null)\n  let runResults = $state<Record<string, ModelRunResult[]>>({})\n\n  // Run state\n  let running = $state(false)\n\n  // Info banner\n  let infoDismissed = $state(localStorage.getItem('chui-pipeline-info-dismissed') === '1')\n\n  function dismissInfo() {\n    infoDismissed = true\n    localStorage.setItem('chui-pipeline-info-dismissed', '1')\n  }\n\n  // Pipelines\n  let pipelines = $state<Pipeline[]>([])\n\n  // Per-pipeline run state\n  let runningPipeline = $state<string | null>(null)\n\n  // Schedule (per-pipeline)\n  let showSchedule = $state(false)\n  let scheduleAnchorId = $state<string | null>(null)\n  let schedule = $state<ModelSchedule | null>(null)\n  let schedCron = $state('0 */6 * * *')\n  let schedSaving = $state(false)\n\n  // Derived: model lookup by id\n  let modelById = $derived(new Map(models.map(m => [m.id, m])))\n\n  // Context menu\n  let contextMenu = $state<{ model: Model; x: number; y: number } | null>(null)\n\n  function openContextMenuFromButton(event: MouseEvent, model: Model) {\n    event.preventDefault()\n    event.stopPropagation()\n    const rect = (event.currentTarget as HTMLElement).getBoundingClientRect()\n    contextMenu = {\n      model,\n      x: Math.min(window.innerWidth - 240, rect.right),\n      y: Math.min(window.innerHeight - 220, rect.bottom + 6),\n    }\n  }\n\n  function openContextMenu(event: MouseEvent, model: Model) {\n    event.preventDefault()\n    event.stopPropagation()\n    contextMenu = {\n      model,\n      x: Math.min(window.innerWidth - 240, event.clientX),\n      y: Math.min(window.innerHeight - 220, event.clientY),\n    }\n  }\n\n  function closeContextMenu() {\n    contextMenu = null\n  }\n\n  function getContextItems(): 
ContextMenuItem[] {\n    const m = contextMenu?.model\n    if (!m) return []\n    return [\n      { id: 'open', label: 'Open', icon: Eye, onSelect: () => selectModel(m.id) },\n      { id: 'sep1', separator: true },\n      { id: 'delete', label: 'Delete', icon: Trash2, danger: true, onSelect: () => handleDelete(m.id) },\n    ]\n  }\n\n  // Delete confirm\n  let confirmDeleteOpen = $state(false)\n  let confirmDeleteLoading = $state(false)\n  let pendingDeleteId = $state('')\n  let pendingDeleteName = $state('')\n\n  const theme = $derived(getTheme())\n\n  const nodeTypes: NodeTypes = {\n    model: ModelNode as any,\n  }\n\n  // ── Lifecycle ──────────────────────────────────────────────────────\n\n  onMount(async () => {\n    await loadModels()\n    loadDAG()\n    loadPipelines()\n  })\n\n  // ── Data loading ───────────────────────────────────────────────────\n\n  async function loadModels() {\n    loading = true\n    try {\n      const res = await api.listModels()\n      models = res.models ?? []\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to load models')\n    } finally {\n      loading = false\n    }\n  }\n\n  async function loadDAG() {\n    try {\n      const res = await api.getDAG()\n      dagEdgesRaw = res.edges ?? []\n      dagNodes = (res.nodes ?? []).map((n: DAGNode) => ({\n        id: n.id,\n        type: 'model',\n        position: n.position,\n        data: n.data,\n      }))\n      dagEdges = dagEdgesRaw.map((e: DAGEdge) => ({\n        id: e.id,\n        source: e.source,\n        target: e.target,\n        animated: true,\n        style: 'stroke: #f97316; stroke-width: 2px;',\n      }))\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to load DAG')\n    }\n  }\n\n  async function loadRuns() {\n    try {\n      const res = await api.listModelRuns()\n      runs = res.runs ?? 
[]\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to load runs')\n    }\n  }\n\n  async function loadRunResults(runId: string) {\n    if (runResults[runId]) return\n    try {\n      const res = await api.getModelRun(runId)\n      runResults[runId] = res.results ?? []\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to load run results')\n    }\n  }\n\n  async function loadPipelines() {\n    try {\n      const res = await api.listPipelines()\n      pipelines = res.pipelines ?? []\n    } catch {\n      // Pipelines not available — ignore\n    }\n  }\n\n  // ── Actions ────────────────────────────────────────────────────────\n\n  async function handleSaveSchedule() {\n    if (!scheduleAnchorId) return\n    schedSaving = true\n    try {\n      const res = await api.upsertPipelineSchedule(scheduleAnchorId, { cron: schedCron, enabled: true })\n      schedule = res.schedule\n      toastSuccess('Schedule saved')\n      await loadPipelines()\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to save schedule')\n    } finally {\n      schedSaving = false\n    }\n  }\n\n  async function handleDeleteSchedule() {\n    if (!scheduleAnchorId) return\n    schedSaving = true\n    try {\n      await api.deletePipelineSchedule(scheduleAnchorId)\n      schedule = null\n      schedCron = '0 */6 * * *'\n      toastSuccess('Schedule removed')\n      showSchedule = false\n      await loadPipelines()\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to delete schedule')\n    } finally {\n      schedSaving = false\n    }\n  }\n\n  async function handleRunPipeline(anchorId: string) {\n    runningPipeline = anchorId\n    try {\n      const res = await api.runPipeline(anchorId)\n      toastSuccess('Pipeline run started')\n      await loadModels()\n      await loadRuns()\n      if (res.run_id) {\n        expandedRunId = res.run_id\n        await loadRunResults(res.run_id)\n      
}\n      showHistory = true\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to run pipeline')\n    } finally {\n      runningPipeline = null\n    }\n  }\n\n  function openPipelineSchedule(anchorId: string, existing: ModelSchedule | null) {\n    scheduleAnchorId = anchorId\n    schedule = existing\n    schedCron = existing?.cron ?? '0 */6 * * *'\n    showSchedule = true\n  }\n\n  async function handleCreate() {\n    const existing = new Set(models.map(m => m.name))\n    let name = 'new_model'\n    let i = 1\n    while (existing.has(name)) {\n      name = `new_model_${i++}`\n    }\n    try {\n      const boilerplate = `SELECT *\\nFROM default.my_table\\nLIMIT 100`\n      const res = await api.createModel({\n        name,\n        target_database: 'default',\n        materialization: 'view',\n        sql_body: boilerplate,\n      })\n      refreshModelCache()\n      toastSuccess('Model created')\n      await loadModels()\n      await loadPipelines()\n      openModelTab(res.model)\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to create model')\n    }\n  }\n\n  function handleDelete(id: string) {\n    const model = models.find(m => m.id === id)\n    pendingDeleteId = id\n    pendingDeleteName = model?.name ?? 
''\n    confirmDeleteOpen = true\n  }\n\n  async function confirmDelete() {\n    confirmDeleteLoading = true\n    try {\n      await api.deleteModel(pendingDeleteId)\n      refreshModelCache()\n      toastSuccess('Model deleted')\n      await loadModels()\n      await loadPipelines()\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to delete model')\n    } finally {\n      confirmDeleteLoading = false\n      confirmDeleteOpen = false\n      pendingDeleteId = ''\n      pendingDeleteName = ''\n    }\n  }\n\n  async function handleRunAll() {\n    running = true\n    try {\n      const res = await api.runAllModels()\n      toastSuccess('Model run started')\n      await loadModels()\n      await loadRuns()\n      if (res.run_id) {\n        expandedRunId = res.run_id\n        await loadRunResults(res.run_id)\n      }\n      showHistory = true\n    } catch (e: unknown) {\n      toastError((e as Error).message || 'Failed to run models')\n    } finally {\n      running = false\n    }\n  }\n\n  // ── Navigation ─────────────────────────────────────────────────────\n\n  function selectModel(id: string) {\n    const model = models.find(m => m.id === id)\n    if (!model) return\n    openModelTab(model)\n  }\n\n  function openDAG() {\n    loadDAG()\n    showDAG = true\n  }\n\n  function openHistory() {\n    loadRuns()\n    showHistory = true\n  }\n\n  function toggleRunExpand(runId: string) {\n    if (expandedRunId === runId) {\n      expandedRunId = null\n    } else {\n      expandedRunId = runId\n      loadRunResults(runId)\n    }\n  }\n\n  // ── Helpers ────────────────────────────────────────────────────────\n\n  function formatDate(d: string | null): string {\n    if (!d) return '\\u2014'\n    return new Date(d).toLocaleString()\n  }\n\n  function statusBadge(status: string): string {\n    switch (status) {\n      case 'success': return 'bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-400'\n      case 'error': return 'bg-red-100 
text-red-700 dark:bg-red-900/30 dark:text-red-400'\n      case 'partial': return 'bg-yellow-100 text-yellow-700 dark:bg-yellow-900/30 dark:text-yellow-400'\n      case 'running': return 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400'\n      default: return 'bg-gray-100 text-gray-600 dark:bg-gray-800 dark:text-gray-400'\n    }\n  }\n\n  function statusDot(status: string): string {\n    switch (status) {\n      case 'success': return 'bg-green-500'\n      case 'error': return 'bg-red-500'\n      default: return 'bg-gray-400'\n    }\n  }\n</script>\n\n<div class=\"flex flex-col h-full overflow-hidden\">\n  <!-- ─── Toolbar ─────────────────────────────────────────────────── -->\n  <div class=\"flex items-center gap-3 px-4 py-2.5 border-b border-gray-200 dark:border-gray-700 bg-white dark:bg-gray-950 shrink-0\">\n    <Boxes size={18} class=\"text-orange-500 shrink-0\" />\n    <h1 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Model Pipeline</h1>\n    {#if !loading}\n      <span class=\"text-[10px] text-gray-400 dark:text-gray-500 tabular-nums\">{models.length} model{models.length !== 1 ? 's' : ''}</span>\n    {/if}\n    <div class=\"flex-1\"></div>\n    <button\n      onclick={handleRunAll}\n      disabled={running || models.length === 0}\n      class=\"flex items-center gap-1.5 text-xs px-2.5 py-1.5 rounded text-gray-600 dark:text-gray-300 hover:text-green-600 hover:bg-green-50 dark:hover:bg-green-900/20 disabled:opacity-40 transition-colors\"\n      title=\"Run all models in dependency order\"\n    >\n      <Play size={13} /> {running ? 'Running...' 
: 'Run Pipeline'}\n    </button>\n    <button\n      onclick={openDAG}\n      disabled={models.length === 0}\n      class=\"flex items-center gap-1.5 text-xs px-2.5 py-1.5 rounded text-gray-600 dark:text-gray-300 hover:text-orange-500 hover:bg-orange-50 dark:hover:bg-orange-900/20 disabled:opacity-40 transition-colors\"\n      title=\"Dependency graph\"\n    >\n      <GitBranch size={13} /> DAG\n    </button>\n    <button\n      onclick={openHistory}\n      class=\"flex items-center gap-1.5 text-xs px-2.5 py-1.5 rounded text-gray-600 dark:text-gray-300 hover:text-orange-500 hover:bg-orange-50 dark:hover:bg-orange-900/20 transition-colors\"\n      title=\"Run history\"\n    >\n      <History size={13} /> History\n    </button>\n    <button\n      onclick={handleCreate}\n      class=\"flex items-center gap-1.5 text-xs px-3 py-1.5 rounded bg-orange-500 text-white hover:bg-orange-600 transition-colors font-medium\"\n    >\n      <Plus size={14} /> New Model\n    </button>\n  </div>\n\n  <!-- ─── Content ─────────────────────────────────────────────────── -->\n  <div class=\"flex-1 min-h-0 overflow-auto\">\n    {#if loading}\n      <div class=\"flex items-center justify-center h-full text-gray-400 text-sm\">Loading...</div>\n    {:else if models.length === 0}\n      <div class=\"flex flex-col items-center justify-center h-full gap-4 text-gray-400 dark:text-gray-500\">\n        <Boxes size={56} strokeWidth={1} class=\"opacity-20\" />\n        <div class=\"text-center space-y-1\">\n          <p class=\"text-sm font-medium text-gray-500 dark:text-gray-400\">No models yet</p>\n          <p class=\"text-xs\">Models are SQL transformations that form a pipeline. 
They can reference each other with <code class=\"text-[11px] px-1 py-0.5 rounded bg-gray-100 dark:bg-gray-800 font-mono\">$ref(model_name)</code> and run in dependency order.</p>\n        </div>\n        <button\n          onclick={handleCreate}\n          class=\"flex items-center gap-1.5 text-xs px-4 py-2 rounded bg-orange-500 text-white hover:bg-orange-600 transition-colors font-medium\"\n        >\n          <Plus size={14} /> Create your first model\n        </button>\n      </div>\n    {:else}\n      {#if !infoDismissed}\n        <div class=\"mx-4 mt-4 mb-0 flex items-start gap-2.5 px-3 py-2.5 rounded-lg border border-blue-200 dark:border-blue-800/50 bg-blue-50/70 dark:bg-blue-950/30 text-xs text-gray-600 dark:text-gray-400 leading-relaxed\">\n          <Info size={14} class=\"text-blue-400 shrink-0 mt-0.5\" />\n          <p class=\"flex-1\">\n            Models are SQL transformations that form a pipeline. Use <code class=\"text-[11px] px-1 py-0.5 rounded bg-blue-100 dark:bg-blue-900/40 font-mono text-blue-700 dark:text-blue-300\">$ref(model_name)</code> to reference other models.\n            <span class=\"font-semibold\">Run Pipeline</span> executes all models in dependency order — if a model fails, its dependents are automatically skipped.\n          </p>\n          <button onclick={dismissInfo} class=\"text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 shrink-0 mt-0.5\" title=\"Dismiss\">\n            <X size={14} />\n          </button>\n        </div>\n      {/if}\n      <div class=\"p-4 space-y-4\">\n        {#each pipelines as pipeline (pipeline.anchor_model_id)}\n          {@const pipelineModels = pipeline.model_ids.map(id => modelById.get(id)).filter((m): m is Model => !!m)}\n          {#if pipelineModels.length > 0}\n            <div class=\"rounded-lg border border-gray-200 dark:border-gray-700/80 bg-white dark:bg-gray-900/30 overflow-hidden\">\n              <!-- Pipeline header -->\n              <div class=\"flex items-center gap-2.5 
px-4 py-2.5 bg-gray-50 dark:bg-gray-900/50 border-b border-gray-200 dark:border-gray-700/60\">\n                <Boxes size={14} class=\"text-orange-400 shrink-0\" />\n                <span class=\"text-xs font-medium text-gray-600 dark:text-gray-300\">\n                  {pipelineModels.length} model{pipelineModels.length !== 1 ? 's' : ''}\n                </span>\n                <div class=\"flex-1\"></div>\n                <button\n                  onclick={() => handleRunPipeline(pipeline.anchor_model_id)}\n                  disabled={runningPipeline === pipeline.anchor_model_id || running}\n                  class=\"flex items-center gap-1 text-[11px] px-2 py-1 rounded text-gray-500 dark:text-gray-400 hover:text-green-600 hover:bg-green-50 dark:hover:bg-green-900/20 disabled:opacity-40 transition-colors\"\n                  title=\"Run this pipeline\"\n                >\n                  <Play size={11} />\n                  {runningPipeline === pipeline.anchor_model_id ? 'Running...' : 'Run'}\n                </button>\n                <button\n                  onclick={() => openPipelineSchedule(pipeline.anchor_model_id, pipeline.schedule)}\n                  class=\"flex items-center gap-1 text-[11px] px-2 py-1 rounded transition-colors\n                    {pipeline.schedule\n                      ? 'text-orange-600 dark:text-orange-400 bg-orange-50 dark:bg-orange-900/20 font-mono'\n                      : 'text-gray-500 dark:text-gray-400 hover:text-orange-500 hover:bg-orange-50 dark:hover:bg-orange-900/20'}\"\n                  title={pipeline.schedule ? `Schedule: ${pipeline.schedule.cron}` : 'No schedule'}\n                >\n                  <Timer size={11} />\n                  {pipeline.schedule ? 
pipeline.schedule.cron : 'No schedule'}\n                </button>\n              </div>\n              <!-- Model cards grid -->\n              <div class=\"p-3 grid gap-3 grid-cols-[repeat(auto-fill,minmax(240px,1fr))]\">\n                {#each pipelineModels as model (model.id)}\n                  <!-- svelte-ignore a11y_no_static_element_interactions -->\n                  <div\n                    class=\"group relative flex flex-col gap-2 p-3.5 rounded-lg border border-gray-200 dark:border-gray-700/80 bg-white dark:bg-gray-900/50 hover:border-orange-300 dark:hover:border-orange-500/40 hover:shadow-sm cursor-pointer transition-all\"\n                    onclick={() => selectModel(model.id)}\n                    oncontextmenu={(e) => openContextMenu(e, model)}\n                    onkeydown={(e: KeyboardEvent) => { if (e.key === 'Enter') selectModel(model.id) }}\n                    role=\"button\"\n                    tabindex=\"0\"\n                  >\n                    <div class=\"flex items-center gap-2\">\n                      {#if model.materialization === 'table'}\n                        <Table2 size={14} class=\"text-orange-400 shrink-0\" />\n                      {:else}\n                        <Eye size={14} class=\"text-blue-400 shrink-0\" />\n                      {/if}\n                      <span class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 truncate flex-1\">{model.name}</span>\n                      <span class=\"w-2 h-2 rounded-full {statusDot(model.status)} shrink-0\" title={model.status}></span>\n                      <button\n                        onclick={(e) => openContextMenuFromButton(e, model)}\n                        class=\"p-1 rounded opacity-0 group-hover:opacity-100 text-gray-400 hover:text-gray-600 dark:hover:text-gray-300 hover:bg-gray-200/70 dark:hover:bg-gray-800/70 transition-all\"\n                        title=\"More actions\"\n                      >\n                        <MoreHorizontal 
size={15} />\n                      </button>\n                    </div>\n                    <div class=\"flex items-center gap-2 text-[11px] text-gray-400 dark:text-gray-500\">\n                      <span class=\"px-1.5 py-0.5 rounded bg-gray-100 dark:bg-gray-800 text-gray-500 dark:text-gray-400 font-medium\">\n                        {model.materialization}\n                      </span>\n                      <span class=\"truncate\">{model.target_database}</span>\n                      {#if model.last_run_at}\n                        <span class=\"ml-auto shrink-0\" title=\"Last run\">{formatDate(model.last_run_at)}</span>\n                      {/if}\n                    </div>\n                    {#if upstreamMap.get(model.id)?.length}\n                      <div class=\"flex items-center gap-1.5 flex-wrap\">\n                        <GitBranch size={11} class=\"text-orange-400 shrink-0\" />\n                        {#each upstreamMap.get(model.id)! as dep}\n                          <span class=\"text-[10px] px-1.5 py-0.5 rounded-full bg-orange-100 dark:bg-orange-900/30 text-orange-600 dark:text-orange-400 font-medium\">{dep}</span>\n                        {/each}\n                      </div>\n                    {/if}\n                    {#if model.last_error}\n                      <p class=\"text-[11px] text-red-500 dark:text-red-400 truncate\" title={model.last_error}>{model.last_error}</p>\n                    {:else if model.description}\n                      <p class=\"text-[11px] text-gray-400 dark:text-gray-500 truncate\">{model.description}</p>\n                    {/if}\n                  </div>\n                {/each}\n              </div>\n            </div>\n          {/if}\n        {/each}\n      </div>\n    {/if}\n  </div>\n\n  <!-- ─── Schedule Footer ──────────────────────────────────────────── -->\n  {#if !loading && pipelines.length > 0}\n    {@const scheduledPipelines = pipelines.filter(p => p.schedule)}\n    <div 
class=\"shrink-0 flex items-center gap-2 px-4 py-2 border-t border-gray-200 dark:border-gray-700 bg-gray-50 dark:bg-gray-900/50 text-xs text-gray-500 dark:text-gray-400\">\n      <Timer size={13} class=\"shrink-0 text-gray-400 dark:text-gray-500\" />\n      {#if scheduledPipelines.length > 0}\n        <span>{scheduledPipelines.length} pipeline{scheduledPipelines.length !== 1 ? 's' : ''} scheduled</span>\n        {#each scheduledPipelines as sp}\n          {@const anchorModel = modelById.get(sp.anchor_model_id)}\n          <span class=\"text-gray-300 dark:text-gray-600\">·</span>\n          <span class=\"font-mono text-gray-600 dark:text-gray-300\" title={anchorModel?.name ?? sp.anchor_model_id}>\n            {sp.schedule?.cron}\n          </span>\n          {#if sp.schedule?.last_status}\n            <span class=\"{sp.schedule.last_status === 'success' ? 'text-green-600 dark:text-green-400' : sp.schedule.last_status === 'error' ? 'text-red-600 dark:text-red-400' : ''} font-medium\">\n              {sp.schedule.last_status}\n            </span>\n          {/if}\n        {/each}\n      {:else}\n        <span>No pipelines scheduled</span>\n      {/if}\n    </div>\n  {/if}\n</div>\n\n<!-- ─── DAG Overlay ──────────────────────────────────────────────── -->\n{#if showDAG}\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div class=\"fixed inset-0 z-50 flex flex-col bg-white dark:bg-gray-950\" role=\"dialog\" tabindex=\"-1\">\n    <div class=\"flex items-center gap-3 px-4 py-3 border-b border-gray-200 dark:border-gray-700 shrink-0\">\n      <GitBranch size={18} class=\"text-orange-500\" />\n      <h2 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 flex-1\">Dependency Graph</h2>\n      <button\n        onclick={loadDAG}\n        class=\"text-xs text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 flex items-center gap-1\"\n      >\n        <RefreshCw size={12} /> Refresh\n      </button>\n      <button\n        onclick={() => { showDAG 
= false }}\n        class=\"text-xs px-3 py-1 rounded border border-gray-300 dark:border-gray-600 text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-800\"\n      >\n        Close\n      </button>\n    </div>\n    <div class=\"flex-1 min-h-0\">\n      {#if dagNodes.length === 0}\n        <div class=\"flex items-center justify-center h-full text-gray-400 text-sm\">\n          {models.length === 0 ? 'No models to show' : 'Loading DAG...'}\n        </div>\n      {:else}\n        <SvelteFlow\n          nodes={dagNodes}\n          edges={dagEdges}\n          {nodeTypes}\n          fitView\n          colorMode={theme === 'dark' ? 'dark' : 'light'}\n          onnodeclick={({ node }) => { showDAG = false; selectModel(node.id); }}\n        >\n          <Background />\n          <Controls />\n        </SvelteFlow>\n      {/if}\n    </div>\n  </div>\n{/if}\n\n<!-- ─── History Overlay ──────────────────────────────────────────── -->\n{#if showHistory}\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div class=\"fixed inset-0 z-50 flex flex-col bg-white dark:bg-gray-950\" role=\"dialog\" tabindex=\"-1\">\n    <div class=\"flex items-center gap-3 px-4 py-3 border-b border-gray-200 dark:border-gray-700 shrink-0\">\n      <History size={18} class=\"text-orange-500\" />\n      <h2 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 flex-1\">Run History</h2>\n      <button\n        onclick={loadRuns}\n        class=\"text-xs text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 flex items-center gap-1\"\n      >\n        <RefreshCw size={12} /> Refresh\n      </button>\n      <button\n        onclick={() => { showHistory = false }}\n        class=\"text-xs px-3 py-1 rounded border border-gray-300 dark:border-gray-600 text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-800\"\n      >\n        Close\n      </button>\n    </div>\n    <div class=\"flex-1 min-h-0 overflow-auto\">\n      {#if runs.length === 0}\n        
<div class=\"flex items-center justify-center h-64 text-gray-400 text-sm\">No runs yet</div>\n      {:else}\n        <div class=\"p-4 space-y-2\">\n          {#each runs as run (run.id)}\n            <div class=\"border border-gray-200 dark:border-gray-700 rounded-lg overflow-hidden\">\n              <button\n                onclick={() => toggleRunExpand(run.id)}\n                class=\"w-full flex items-center gap-3 px-4 py-3 hover:bg-gray-50 dark:hover:bg-gray-800/50 transition-colors text-left\"\n              >\n                {#if expandedRunId === run.id}\n                  <ChevronDown size={14} class=\"text-gray-400 shrink-0\" />\n                {:else}\n                  <ChevronRight size={14} class=\"text-gray-400 shrink-0\" />\n                {/if}\n                <span class=\"text-[10px] px-2 py-0.5 rounded-full {statusBadge(run.status)} font-medium uppercase tracking-wide\">\n                  {run.status}\n                </span>\n                <span class=\"text-xs text-gray-600 dark:text-gray-300 flex-1\">\n                  {run.total_models} models\n                  <span class=\"text-gray-400\">|</span>\n                  <span class=\"text-green-600 dark:text-green-400\">{run.succeeded} ok</span>\n                  {#if run.failed > 0}\n                    <span class=\"text-gray-400\">|</span>\n                    <span class=\"text-red-600 dark:text-red-400\">{run.failed} failed</span>\n                  {/if}\n                  {#if run.skipped > 0}\n                    <span class=\"text-gray-400\">|</span>\n                    <span class=\"text-gray-500\">{run.skipped} skipped</span>\n                  {/if}\n                </span>\n                <span class=\"text-[10px] text-gray-400\">{formatDate(run.started_at)}</span>\n              </button>\n\n              {#if expandedRunId === run.id && runResults[run.id]}\n                <div class=\"border-t border-gray-200 dark:border-gray-700\">\n                  {#each 
runResults[run.id] as result (result.id)}\n                    <div class=\"flex items-center gap-3 px-4 py-2 text-xs border-b border-gray-100 dark:border-gray-800 last:border-0\">\n                      <span class=\"shrink-0\">\n                        {#if result.status === 'success'}\n                          <CheckCircle size={14} class=\"text-green-500\" />\n                        {:else if result.status === 'error'}\n                          <XCircle size={14} class=\"text-red-500\" />\n                        {:else if result.status === 'skipped'}\n                          <SkipForward size={14} class=\"text-gray-400\" />\n                        {:else}\n                          <Clock size={14} class=\"text-blue-400\" />\n                        {/if}\n                      </span>\n                      <span class=\"font-medium text-gray-700 dark:text-gray-300 min-w-[120px]\">{result.model_name}</span>\n                      <span class=\"text-[10px] px-1.5 py-0.5 rounded-full {statusBadge(result.status)}\">{result.status}</span>\n                      <span class=\"text-gray-400\">{result.elapsed_ms}ms</span>\n                      {#if result.error}\n                        <span class=\"text-red-500 truncate flex-1\" title={result.error}>{result.error}</span>\n                      {/if}\n                    </div>\n                  {/each}\n                </div>\n              {/if}\n            </div>\n          {/each}\n        </div>\n      {/if}\n    </div>\n  </div>\n{/if}\n\n<!-- ─── Schedule Overlay ─────────────────────────────────────────── -->\n{#if showSchedule}\n  <!-- svelte-ignore a11y_no_static_element_interactions -->\n  <div\n    class=\"fixed inset-0 z-50 flex items-center justify-center bg-black/40\"\n    role=\"dialog\"\n    tabindex=\"-1\"\n    onkeydown={(e: KeyboardEvent) => { if (e.key === 'Escape') showSchedule = false }}\n    onclick={() => { showSchedule = false }}\n  >\n    <!-- svelte-ignore 
a11y_no_static_element_interactions -->\n    <div\n      class=\"bg-white dark:bg-gray-900 rounded-lg shadow-xl w-full max-w-md border border-gray-200 dark:border-gray-700\"\n      onclick={(e: MouseEvent) => e.stopPropagation()}\n      onkeydown={() => {}}\n    >\n      <div class=\"flex items-center gap-2 px-4 py-3 border-b border-gray-200 dark:border-gray-700\">\n        <Timer size={16} class=\"text-orange-500\" />\n        <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 flex-1\">\n          Pipeline Schedule\n          {#if scheduleAnchorId}\n            {@const anchorModel = modelById.get(scheduleAnchorId)}\n            {#if anchorModel}\n              <span class=\"font-normal text-gray-400 dark:text-gray-500 ml-1\">({anchorModel.name})</span>\n            {/if}\n          {/if}\n        </h3>\n        <button onclick={() => { showSchedule = false }} class=\"text-gray-400 hover:text-gray-600 dark:hover:text-gray-300\">\n          <X size={16} />\n        </button>\n      </div>\n\n      <div class=\"p-4 space-y-4\">\n        <!-- Cron input -->\n        <p class=\"text-[11px] text-gray-400 dark:text-gray-500\">Runs the models in this pipeline group in dependency order on this schedule.</p>\n        <div>\n          <label for=\"sched-cron\" class=\"block text-xs font-medium text-gray-600 dark:text-gray-400 mb-1\">Cron Expression (5-field)</label>\n          <input\n            id=\"sched-cron\"\n            type=\"text\"\n            bind:value={schedCron}\n            placeholder=\"0 */6 * * *\"\n            class=\"w-full text-sm border border-gray-300 dark:border-gray-600 rounded px-3 py-1.5 bg-transparent text-gray-800 dark:text-gray-200 focus:border-orange-400 focus:outline-none font-mono\"\n          />\n        </div>\n\n        <!-- Presets -->\n        <div class=\"flex flex-wrap gap-1.5\">\n          {#each [\n            { label: 'Every hour', cron: '0 * * * *' },\n            { label: 'Every 6h', cron: '0 */6 * * *' },\n        
    { label: 'Daily midnight', cron: '0 0 * * *' },\n            { label: 'Weekly Mon 2am', cron: '0 2 * * 1' },\n          ] as preset}\n            <button\n              onclick={() => { schedCron = preset.cron }}\n              class=\"text-[10px] px-2 py-1 rounded border transition-colors\n                {schedCron === preset.cron\n                  ? 'border-orange-400 bg-orange-50 dark:bg-orange-900/20 text-orange-600 dark:text-orange-400'\n                  : 'border-gray-300 dark:border-gray-600 text-gray-500 dark:text-gray-400 hover:border-gray-400'}\"\n            >\n              {preset.label}\n            </button>\n          {/each}\n        </div>\n\n        <!-- Status info -->\n        {#if schedule}\n          <div class=\"text-xs space-y-1 text-gray-500 dark:text-gray-400 bg-gray-50 dark:bg-gray-800/50 rounded p-3\">\n            <div class=\"flex justify-between\">\n              <span>Status</span>\n              <span class=\"font-medium {schedule.last_status === 'success' ? 'text-green-600 dark:text-green-400' : schedule.last_status === 'error' ? 'text-red-600 dark:text-red-400' : ''}\">\n                {schedule.last_status ?? 
'pending'}\n              </span>\n            </div>\n            <div class=\"flex justify-between\">\n              <span>Next run</span>\n              <span>{formatDate(schedule.next_run_at)}</span>\n            </div>\n            <div class=\"flex justify-between\">\n              <span>Last run</span>\n              <span>{formatDate(schedule.last_run_at)}</span>\n            </div>\n            {#if schedule.last_error}\n              <div class=\"text-red-500 dark:text-red-400 mt-1 text-[10px] break-all\">{schedule.last_error}</div>\n            {/if}\n          </div>\n        {/if}\n\n        <!-- Actions -->\n        <div class=\"flex items-center gap-2 pt-2\">\n          <button\n            onclick={handleSaveSchedule}\n            disabled={schedSaving || !schedCron.trim()}\n            class=\"flex items-center gap-1 text-xs px-3 py-1.5 rounded bg-orange-500 text-white hover:bg-orange-600 disabled:opacity-50 transition-colors font-medium\"\n          >\n            <Save size={12} /> {schedSaving ? 'Saving...' 
: 'Save'}\n          </button>\n          {#if schedule}\n            <button\n              onclick={handleDeleteSchedule}\n              disabled={schedSaving}\n              class=\"flex items-center gap-1 text-xs px-3 py-1.5 rounded border border-red-300 dark:border-red-700 text-red-600 dark:text-red-400 hover:bg-red-50 dark:hover:bg-red-900/20 disabled:opacity-50 transition-colors\"\n            >\n              <Trash2 size={12} /> Remove\n            </button>\n          {/if}\n          <div class=\"flex-1\"></div>\n          <button\n            onclick={() => { showSchedule = false }}\n            class=\"text-xs px-3 py-1.5 rounded border border-gray-300 dark:border-gray-600 text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-800 transition-colors\"\n          >\n            Close\n          </button>\n        </div>\n      </div>\n    </div>\n  </div>\n{/if}\n\n<ConfirmDialog\n  open={confirmDeleteOpen}\n  title=\"Delete Model\"\n  description={`Are you sure you want to delete \"${pendingDeleteName}\"? This cannot be undone.`}\n  confirmLabel=\"Delete\"\n  destructive\n  loading={confirmDeleteLoading}\n  onconfirm={confirmDelete}\n  oncancel={() => { confirmDeleteOpen = false; pendingDeleteId = ''; pendingDeleteName = '' }}\n/>\n\n<ContextMenu\n  open={!!contextMenu}\n  x={contextMenu?.x ?? 0}\n  y={contextMenu?.y ?? 0}\n  items={getContextItems()}\n  onclose={closeContextMenu}\n/>\n"
  },
  {
    "path": "ui/src/pages/Pipelines.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { Pipeline } from '../lib/types/pipelines'\n  import * as api from '../lib/api/pipelines'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import { getCurrentPipelineId, pushPipelineDetail, pushPipelineList } from '../lib/stores/router.svelte'\n  import PipelineList from '../lib/components/pipelines/PipelineList.svelte'\n  import PipelineEditor from '../lib/components/pipelines/PipelineEditor.svelte'\n  import InputDialog from '../lib/components/common/InputDialog.svelte'\n\n  let pipelines = $state<Pipeline[]>([])\n  let loading = $state(true)\n  const selectedPipelineId = $derived(getCurrentPipelineId() ?? null)\n  let showCreate = $state(false)\n  let createName = $state('')\n  let createLoading = $state(false)\n\n  onMount(async () => {\n    await loadPipelines()\n  })\n\n  async function loadPipelines() {\n    loading = true\n    try {\n      const res = await api.listPipelines()\n      pipelines = res.pipelines ?? 
[]\n    } catch (e: any) {\n      toastError(e.message || 'Failed to load pipelines')\n    } finally {\n      loading = false\n    }\n  }\n\n  async function handleCreate(name: string) {\n    createLoading = true\n    try {\n      const res = await api.createPipeline({ name })\n      toastSuccess(`Pipeline \"${name}\" created`)\n      showCreate = false\n      createName = ''\n      await loadPipelines()\n      // Open the editor for the newly created pipeline\n      pushPipelineDetail(res.pipeline.id)\n    } catch (e: any) {\n      toastError(e.message || 'Failed to create pipeline')\n    } finally {\n      createLoading = false\n    }\n  }\n\n  async function handleDelete(id: string) {\n    try {\n      await api.deletePipeline(id)\n      toastSuccess('Pipeline deleted')\n      await loadPipelines()\n    } catch (e: any) {\n      toastError(e.message || 'Failed to delete pipeline')\n    }\n  }\n\n  async function handleStart(id: string) {\n    try {\n      await api.startPipeline(id)\n      toastSuccess('Pipeline started')\n      await loadPipelines()\n    } catch (e: any) {\n      toastError(e.message || 'Failed to start pipeline')\n    }\n  }\n\n  async function handleStop(id: string) {\n    try {\n      await api.stopPipeline(id)\n      toastSuccess('Pipeline stopped')\n      await loadPipelines()\n    } catch (e: any) {\n      toastError(e.message || 'Failed to stop pipeline')\n    }\n  }\n</script>\n\n{#if selectedPipelineId}\n  <PipelineEditor\n    pipelineId={selectedPipelineId}\n    onBack={() => {\n      pushPipelineList()\n      loadPipelines()\n    }}\n  />\n{:else}\n  <PipelineList\n    {pipelines}\n    {loading}\n    onCreate={() => { showCreate = true }}\n    onSelect={(id) => { pushPipelineDetail(id) }}\n    onDelete={handleDelete}\n    onStart={handleStart}\n    onStop={handleStop}\n  />\n{/if}\n\n<InputDialog\n  open={showCreate}\n  title=\"Create Pipeline\"\n  description=\"Give your pipeline a name. 
You can configure the source and sink in the editor.\"\n  placeholder=\"My pipeline\"\n  bind:value={createName}\n  confirmLabel=\"Create\"\n  loading={createLoading}\n  onconfirm={handleCreate}\n  oncancel={() => { showCreate = false; createName = '' }}\n/>\n"
  },
  {
    "path": "ui/src/pages/SavedQueries.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { SavedQuery } from '../lib/types/api'\n  import { apiGet, apiDel, apiPost } from '../lib/api/client'\n  import { openQueryTab, openSavedQueryTab } from '../lib/stores/tabs.svelte'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import Button from '../lib/components/common/Button.svelte'\n  import Spinner from '../lib/components/common/Spinner.svelte'\n  import Combobox from '../lib/components/common/Combobox.svelte'\n  import type { ComboboxOption } from '../lib/components/common/Combobox.svelte'\n  import Sheet from '../lib/components/common/Sheet.svelte'\n  import ContextMenu, { type ContextMenuItem } from '../lib/components/common/ContextMenu.svelte'\n  import ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte'\n  import {\n    Play,\n    Trash2,\n    Search,\n    X,\n    RefreshCw,\n    MoreHorizontal,\n    Copy,\n    CopyPlus,\n    CalendarClock,\n    Hash,\n    AlignLeft,\n    Eye,\n  } from 'lucide-svelte'\n\n  let queries = $state<SavedQuery[]>([])\n  let loading = $state(true)\n\n  let searchTerm = $state('')\n  type SortMode = 'updated-desc' | 'updated-asc' | 'name-asc' | 'name-desc' | 'length-desc'\n  type FilterMode = 'all' | 'described' | 'undescribed'\n  type DensityMode = 'comfortable' | 'compact'\n  let sortMode = $state<SortMode>('updated-desc')\n  let filterMode = $state<FilterMode>('all')\n  let densityMode = $state<DensityMode>('comfortable')\n\n  let detailsOpen = $state(false)\n  let selectedQuery = $state<SavedQuery | null>(null)\n\n  let contextMenu = $state<{ query: SavedQuery; x: number; y: number } | null>(null)\n\n  let confirmOpen = $state(false)\n  let confirmLoading = $state(false)\n  let pendingDeleteId = $state<string | null>(null)\n  let pendingDeleteName = $state('')\n\n  onMount(loadQueries)\n\n  const filterModeOptions: ComboboxOption[] = [\n    { value: 'all', label: 'All' },\n  
  { value: 'described', label: 'With description' },\n    { value: 'undescribed', label: 'No description' },\n  ]\n\n  const sortModeOptions: ComboboxOption[] = [\n    { value: 'updated-desc', label: 'Recently updated' },\n    { value: 'updated-asc', label: 'Oldest updated' },\n    { value: 'name-asc', label: 'Name A-Z' },\n    { value: 'name-desc', label: 'Name Z-A' },\n    { value: 'length-desc', label: 'Longest SQL' },\n  ]\n\n  const totalCount = $derived(queries.length)\n  const describedCount = $derived.by(() => queries.filter((q) => !!q.description?.trim()).length)\n  const recentCount = $derived.by(() => {\n    const now = Date.now()\n    return queries.filter((q) => {\n      const ts = parseTime(q.updated_at)\n      return ts > 0 && now - ts <= 7 * 24 * 60 * 60 * 1000\n    }).length\n  })\n  const totalSqlChars = $derived.by(() => queries.reduce((acc, q) => acc + q.query.length, 0))\n\n  const visibleQueries = $derived.by(() => {\n    const term = searchTerm.trim().toLowerCase()\n    let rows = queries.filter((q) => {\n      const hasDesc = !!q.description?.trim()\n      if (filterMode === 'described' && !hasDesc) return false\n      if (filterMode === 'undescribed' && hasDesc) return false\n      if (!term) return true\n      return (\n        q.name.toLowerCase().includes(term) ||\n        (q.description ?? 
'').toLowerCase().includes(term) ||\n        q.query.toLowerCase().includes(term)\n      )\n    })\n\n    rows = rows.slice().sort((a, b) => {\n      switch (sortMode) {\n        case 'updated-asc':\n          return parseTime(a.updated_at) - parseTime(b.updated_at)\n        case 'name-asc':\n          return a.name.localeCompare(b.name)\n        case 'name-desc':\n          return b.name.localeCompare(a.name)\n        case 'length-desc':\n          return b.query.length - a.query.length\n        case 'updated-desc':\n        default:\n          return parseTime(b.updated_at) - parseTime(a.updated_at)\n      }\n    })\n\n    return rows\n  })\n\n  async function loadQueries() {\n    loading = true\n    try {\n      const res = await apiGet<{ saved_queries: SavedQuery[] }>('/api/saved-queries')\n      queries = res.saved_queries ?? []\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      loading = false\n    }\n  }\n\n  function openInEditor(query: SavedQuery) {\n    closeContextMenu()\n    openSavedQueryTab(query)\n  }\n\n  function openDetails(query: SavedQuery) {\n    selectedQuery = query\n    detailsOpen = true\n    closeContextMenu()\n  }\n\n  function closeDetails() {\n    detailsOpen = false\n    selectedQuery = null\n  }\n\n  async function copySQL(query: SavedQuery) {\n    try {\n      await navigator.clipboard.writeText(query.query)\n      toastSuccess(`Copied SQL for \"${query.name}\"`)\n    } catch {\n      toastError('Clipboard unavailable')\n    } finally {\n      closeContextMenu()\n    }\n  }\n\n  async function duplicateQuery(query: SavedQuery) {\n    try {\n      const created = await apiPost<SavedQuery>(`/api/saved-queries/${query.id}/duplicate`)\n      if (created?.id) {\n        queries = [created, ...queries]\n      } else {\n        await loadQueries()\n      }\n      toastSuccess('Query duplicated')\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      closeContextMenu()\n    }\n  }\n\n  async 
function deleteQuery(id: string) {\n    try {\n      await apiDel(`/api/saved-queries/${id}`)\n      queries = queries.filter((q) => q.id !== id)\n      if (selectedQuery?.id === id) closeDetails()\n      toastSuccess('Query deleted')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  function requestDelete(query: SavedQuery) {\n    pendingDeleteName = query.name\n    pendingDeleteId = query.id\n    confirmOpen = true\n    closeContextMenu()\n  }\n\n  function cancelDelete() {\n    confirmOpen = false\n    pendingDeleteId = null\n    pendingDeleteName = ''\n  }\n\n  async function confirmDelete() {\n    if (!pendingDeleteId) return\n    confirmLoading = true\n    try {\n      await deleteQuery(pendingDeleteId)\n      cancelDelete()\n    } finally {\n      confirmLoading = false\n    }\n  }\n\n  function parseTime(value: string): number {\n    const t = Date.parse(value)\n    return Number.isFinite(t) ? t : 0\n  }\n\n  function formatDate(value: string): string {\n    const t = parseTime(value)\n    if (!t) return value\n    return new Date(t).toLocaleString()\n  }\n\n  function formatRelative(value: string): string {\n    const t = parseTime(value)\n    if (!t) return 'unknown'\n    const delta = Math.max(0, Date.now() - t)\n    const minutes = Math.floor(delta / 60000)\n    if (minutes < 1) return 'just now'\n    if (minutes < 60) return `${minutes}m ago`\n    const hours = Math.floor(minutes / 60)\n    if (hours < 24) return `${hours}h ago`\n    const days = Math.floor(hours / 24)\n    if (days < 30) return `${days}d ago`\n    const months = Math.floor(days / 30)\n    if (months < 12) return `${months}mo ago`\n    return `${Math.floor(months / 12)}y ago`\n  }\n\n  function countLines(sql: string): number {\n    if (!sql.trim()) return 0\n    return sql.split(/\\r?\\n/).length\n  }\n\n  function sqlPreview(sql: string, maxLines = 3): string {\n    const lines = sql.split(/\\r?\\n/).slice(0, maxLines)\n    const suffix = countLines(sql) > 
maxLines ? '\\n...' : ''\n    return lines.join('\\n') + suffix\n  }\n\n  function clearSearch() {\n    searchTerm = ''\n  }\n\n  function closeContextMenu() {\n    contextMenu = null\n  }\n\n  function openContextMenu(event: MouseEvent, query: SavedQuery) {\n    event.preventDefault()\n    event.stopPropagation()\n    contextMenu = {\n      query,\n      x: Math.min(window.innerWidth - 240, event.clientX),\n      y: Math.min(window.innerHeight - 220, event.clientY),\n    }\n  }\n\n  function openContextMenuFromButton(event: MouseEvent, query: SavedQuery) {\n    event.preventDefault()\n    event.stopPropagation()\n    const rect = (event.currentTarget as HTMLElement).getBoundingClientRect()\n    contextMenu = {\n      query,\n      x: Math.min(window.innerWidth - 240, rect.left - 170),\n      y: Math.min(window.innerHeight - 220, rect.bottom + 6),\n    }\n  }\n\n  function getContextItems(): ContextMenuItem[] {\n    const row = contextMenu?.query\n    if (!row) return []\n    return [\n      {\n        id: 'open',\n        label: 'Open in Editor',\n        icon: Play,\n        shortcut: 'Enter',\n        onSelect: () => openInEditor(row),\n      },\n      {\n        id: 'details',\n        label: 'View Details',\n        icon: Eye,\n        shortcut: 'Space',\n        onSelect: () => openDetails(row),\n      },\n      { id: 'sep-main', separator: true },\n      {\n        id: 'copy',\n        label: 'Copy SQL',\n        icon: Copy,\n        shortcut: 'Cmd/Ctrl+C',\n        onSelect: () => copySQL(row),\n      },\n      {\n        id: 'duplicate',\n        label: 'Duplicate',\n        icon: CopyPlus,\n        shortcut: 'Cmd/Ctrl+D',\n        onSelect: () => duplicateQuery(row),\n      },\n      { id: 'sep-danger', separator: true },\n      {\n        id: 'delete',\n        label: 'Delete Saved Query',\n        icon: Trash2,\n        danger: true,\n        onSelect: () => requestDelete(row),\n      },\n    ]\n  }\n</script>\n\n<svelte:window onkeydown={(e) => e.key 
=== 'Escape' && closeContextMenu()} />\n\n<div class=\"flex flex-col h-full overflow-hidden\">\n  <div class=\"ds-page-header shrink-0\">\n    <div class=\"flex items-center gap-3\">\n      <AlignLeft size={17} class=\"text-ch-orange\" />\n      <h1 class=\"ds-page-title\">Saved Queries</h1>\n    </div>\n    <Button size=\"sm\" variant=\"secondary\" onclick={() => { void loadQueries() }}>\n      <RefreshCw size={14} /> Refresh\n    </Button>\n  </div>\n\n  <div class=\"flex-1 overflow-auto p-4\">\n    <div class=\"mx-auto max-w-6xl space-y-4\">\n      <section class=\"ds-panel rounded-xl p-4\">\n        <div class=\"grid grid-cols-2 lg:grid-cols-4 gap-3\">\n          <div class=\"ds-panel-muted p-3\">\n            <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Total</div>\n            <div class=\"mt-1 text-2xl font-semibold text-gray-900 dark:text-gray-100\">{totalCount}</div>\n          </div>\n          <div class=\"ds-panel-muted p-3\">\n            <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Updated 7d</div>\n            <div class=\"mt-1 text-2xl font-semibold text-ch-orange\">{recentCount}</div>\n          </div>\n          <div class=\"ds-panel-muted p-3\">\n            <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">With Description</div>\n            <div class=\"mt-1 text-2xl font-semibold text-gray-900 dark:text-gray-100\">{describedCount}</div>\n          </div>\n          <div class=\"ds-panel-muted p-3\">\n            <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">SQL Characters</div>\n            <div class=\"mt-1 text-2xl font-semibold text-gray-900 dark:text-gray-100\">{totalSqlChars.toLocaleString()}</div>\n          </div>\n        </div>\n      </section>\n\n      <section class=\"ds-panel rounded-xl p-3.5\">\n        <div class=\"grid grid-cols-1 lg:grid-cols-[1fr_auto_auto_auto] gap-2.5\">\n          <div class=\"flex items-center gap-2 rounded-lg border 
border-gray-300/80 dark:border-gray-700/80 bg-gray-100/60 dark:bg-gray-900/60 px-2.5\">\n            <Search size={14} class=\"text-gray-500 shrink-0\" />\n            <input\n              type=\"text\"\n              class=\"w-full h-9 bg-transparent text-[13px] outline-none text-gray-800 dark:text-gray-200 placeholder:text-gray-500\"\n              placeholder=\"Search name, description, or SQL...\"\n              bind:value={searchTerm}\n            />\n            {#if searchTerm}\n              <button\n                class=\"rounded p-1 text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-200/70 dark:hover:bg-gray-800/70\"\n                onclick={clearSearch}\n                title=\"Clear search\"\n              >\n                <X size={13} />\n              </button>\n            {/if}\n          </div>\n\n          <label class=\"inline-flex items-center gap-2 px-0.5 text-[12px] font-medium text-gray-500\">\n            Filter\n            <div class=\"min-w-44\">\n              <Combobox\n                options={filterModeOptions}\n                value={filterMode}\n                onChange={(v) => filterMode = v as FilterMode}\n              />\n            </div>\n          </label>\n\n          <label class=\"inline-flex items-center gap-2 px-0.5 text-[12px] font-medium text-gray-500\">\n            Sort\n            <div class=\"min-w-56\">\n              <Combobox\n                options={sortModeOptions}\n                value={sortMode}\n                onChange={(v) => sortMode = v as SortMode}\n              />\n            </div>\n          </label>\n\n          <div class=\"ds-segment\">\n            <button\n              class=\"ds-segment-btn {densityMode === 'comfortable' ? 
'ds-segment-btn-active' : ''}\"\n              onclick={() => densityMode = 'comfortable'}\n            >\n              Comfortable\n            </button>\n            <button\n              class=\"ds-segment-btn {densityMode === 'compact' ? 'ds-segment-btn-active' : ''}\"\n              onclick={() => densityMode = 'compact'}\n            >\n              Compact\n            </button>\n          </div>\n        </div>\n      </section>\n\n      {#if loading}\n        <div class=\"flex items-center justify-center py-14\"><Spinner /></div>\n      {:else if queries.length === 0}\n        <div class=\"ds-empty rounded-xl p-8\">\n          <p class=\"text-sm text-gray-500\">No saved queries yet</p>\n          <p class=\"text-xs text-gray-400 mt-1\">Save a query from the SQL editor and it will appear here.</p>\n          <div class=\"mt-4\">\n            <Button size=\"sm\" onclick={() => openQueryTab()}>\n              <Play size={14} /> Open New Query\n            </Button>\n          </div>\n        </div>\n      {:else if visibleQueries.length === 0}\n        <div class=\"ds-empty rounded-xl p-8\">\n          <p class=\"text-sm text-gray-500\">No query matches your filters.</p>\n          <p class=\"text-xs text-gray-400 mt-1\">Try another search, filter, or sorting mode.</p>\n        </div>\n      {:else}\n        <div class=\"grid grid-cols-1 gap-3\">\n          {#each visibleQueries as query (query.id)}\n            <article\n              class=\"ds-panel rounded-xl transition-colors hover:border-orange-400/45\"\n              oncontextmenu={(e) => openContextMenu(e, query)}\n            >\n              <div class=\"px-4 py-3\">\n                <div class=\"flex items-start gap-3\">\n                  <div class=\"min-w-0 flex-1\">\n                    <div class=\"flex items-center gap-2 flex-wrap\">\n                      <h3 class=\"text-sm font-semibold text-gray-900 dark:text-gray-100 truncate\">{query.name}</h3>\n                      <span 
class=\"brand-pill rounded-md px-1.5 py-0.5 text-[10px] font-semibold\">\n                        {countLines(query.query)} lines\n                      </span>\n                    </div>\n                    {#if query.description?.trim()}\n                      <p class=\"mt-1 text-xs text-gray-500 line-clamp-2\">{query.description}</p>\n                    {/if}\n                  </div>\n\n                  <button\n                    class=\"rounded-md p-1.5 text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 hover:bg-gray-200/70 dark:hover:bg-gray-800/70\"\n                    onclick={(e) => openContextMenuFromButton(e, query)}\n                    title=\"More actions\"\n                  >\n                    <MoreHorizontal size={15} />\n                  </button>\n                </div>\n\n                <pre class=\"mt-3 ds-panel-muted p-3 text-[12px] leading-relaxed text-gray-700 dark:text-gray-300 font-mono overflow-x-auto whitespace-pre\">{sqlPreview(query.query, densityMode === 'compact' ? 
2 : 4)}</pre>\n\n                <div class=\"mt-3 flex items-center justify-between gap-3 flex-wrap\">\n                  <div class=\"flex items-center gap-3 text-[11px] text-gray-500\">\n                    <span class=\"inline-flex items-center gap-1\"><CalendarClock size={12} /> {formatRelative(query.updated_at)}</span>\n                    <span class=\"inline-flex items-center gap-1\"><Hash size={12} /> {query.query.length.toLocaleString()} chars</span>\n                  </div>\n\n                  <div class=\"inline-flex items-center gap-1.5\">\n                    <button\n                      class=\"ds-btn-outline px-2.5 py-1.5\"\n                      onclick={() => openDetails(query)}\n                    >\n                      <Eye size={12} /> Details\n                    </button>\n                    <button\n                      class=\"ds-btn-outline px-2.5 py-1.5\"\n                      onclick={() => copySQL(query)}\n                    >\n                      <Copy size={12} /> Copy SQL\n                    </button>\n                    <button\n                      class=\"ds-btn-primary px-2.5 py-1.5\"\n                      onclick={() => openInEditor(query)}\n                    >\n                      <Play size={12} /> Open\n                    </button>\n                  </div>\n                </div>\n              </div>\n            </article>\n          {/each}\n        </div>\n      {/if}\n    </div>\n  </div>\n</div>\n\n<ConfirmDialog\n  open={confirmOpen}\n  title=\"Delete saved query?\"\n  description={pendingDeleteName ? `Delete \"${pendingDeleteName}\"? This action cannot be undone.` : 'This action cannot be undone.'}\n  confirmLabel=\"Delete\"\n  destructive={true}\n  loading={confirmLoading}\n  onconfirm={confirmDelete}\n  oncancel={cancelDelete}\n/>\n\n<ContextMenu\n  open={!!contextMenu}\n  x={contextMenu?.x ?? 0}\n  y={contextMenu?.y ?? 
0}\n  items={getContextItems()}\n  onclose={closeContextMenu}\n/>\n\n<Sheet\n  open={detailsOpen}\n  title={selectedQuery?.name ?? 'Saved Query'}\n  size=\"lg\"\n  onclose={closeDetails}\n>\n  {#if selectedQuery}\n    <div class=\"space-y-4\">\n      <div class=\"grid grid-cols-1 sm:grid-cols-2 gap-3\">\n        <div class=\"ds-panel-muted p-3\">\n          <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Updated</div>\n          <div class=\"mt-1 text-sm text-gray-900 dark:text-gray-100\">{formatDate(selectedQuery.updated_at)}</div>\n        </div>\n        <div class=\"ds-panel-muted p-3\">\n          <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Created</div>\n          <div class=\"mt-1 text-sm text-gray-900 dark:text-gray-100\">{formatDate(selectedQuery.created_at)}</div>\n        </div>\n        <div class=\"ds-panel-muted p-3\">\n          <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Line Count</div>\n          <div class=\"mt-1 text-sm text-gray-900 dark:text-gray-100\">{countLines(selectedQuery.query)}</div>\n        </div>\n        <div class=\"ds-panel-muted p-3\">\n          <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Characters</div>\n          <div class=\"mt-1 text-sm text-gray-900 dark:text-gray-100\">{selectedQuery.query.length.toLocaleString()}</div>\n        </div>\n      </div>\n\n      {#if selectedQuery.description?.trim()}\n        <div class=\"ds-panel-muted p-3\">\n          <div class=\"text-[11px] uppercase tracking-wider text-gray-500\">Description</div>\n          <p class=\"mt-1 text-sm text-gray-700 dark:text-gray-300\">{selectedQuery.description}</p>\n        </div>\n      {/if}\n\n      <div class=\"ds-panel-muted p-3\">\n        <div class=\"text-[11px] uppercase tracking-wider text-gray-500 mb-2\">SQL</div>\n        <pre class=\"text-[12px] leading-relaxed text-gray-800 dark:text-gray-200 font-mono overflow-x-auto whitespace-pre p-2 rounded-md 
bg-gray-100 dark:bg-gray-950 border border-gray-200 dark:border-gray-800\">{selectedQuery.query}</pre>\n      </div>\n\n      <div class=\"flex items-center gap-2 flex-wrap\">\n        <Button size=\"sm\" onclick={() => selectedQuery && openInEditor(selectedQuery)}>\n          <Play size={13} /> Open in Editor\n        </Button>\n        <Button size=\"sm\" variant=\"secondary\" onclick={() => selectedQuery && copySQL(selectedQuery)}>\n          <Copy size={13} /> Copy SQL\n        </Button>\n        <Button size=\"sm\" variant=\"secondary\" onclick={() => selectedQuery && duplicateQuery(selectedQuery)}>\n          <CopyPlus size={13} /> Duplicate\n        </Button>\n        <Button size=\"sm\" variant=\"danger\" onclick={() => selectedQuery && requestDelete(selectedQuery)}>\n          <Trash2 size={13} /> Delete\n        </Button>\n      </div>\n    </div>\n  {/if}\n</Sheet>\n"
  },
  {
    "path": "ui/src/pages/Schedules.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import type { Schedule, ScheduleRun, SavedQuery } from '../lib/types/api'\n  import { apiGet, apiPost, apiPut, apiDel } from '../lib/api/client'\n  import { success as toastSuccess, error as toastError } from '../lib/stores/toast.svelte'\n  import { openSavedQueryTab } from '../lib/stores/tabs.svelte'\n  import Button from '../lib/components/common/Button.svelte'\n  import Combobox from '../lib/components/common/Combobox.svelte'\n  import Spinner from '../lib/components/common/Spinner.svelte'\n  import Sheet from '../lib/components/common/Sheet.svelte'\n  import ConfirmDialog from '../lib/components/common/ConfirmDialog.svelte'\n  import { Clock, Plus, Play, Trash2, ChevronDown, ChevronRight, FileText, ExternalLink } from 'lucide-svelte'\n\n  let schedules = $state<Schedule[]>([])\n  let loading = $state(true)\n  let savedQueries = $state<SavedQuery[]>([])\n  let savedQueriesLoading = $state(false)\n\n  // Create/edit modal\n  let showModal = $state(false)\n  let editingId = $state<string | null>(null)\n  let formName = $state('')\n  let formSavedQueryId = $state('')\n  let formCron = $state('')\n  let formTimezone = $state('UTC')\n  let formTimeout = $state(60000)\n  let saving = $state(false)\n\n  // Run history\n  const RUNS_PAGE_SIZE = 10\n  let expandedSchedule = $state<string | null>(null)\n  let runs = $state<ScheduleRun[]>([])\n  let runsLoading = $state(false)\n  let runsLoadingMore = $state(false)\n  let runsOffset = $state(0)\n  let runsHasMore = $state(false)\n\n  // Run details sheet\n  let showRunSheet = $state(false)\n  let selectedRun = $state<ScheduleRun | null>(null)\n  let selectedSchedule = $state<Schedule | null>(null)\n\n  // Confirm delete\n  let confirmOpen = $state(false)\n  let confirmLoading = $state(false)\n  let pendingDeleteId = $state<string | null>(null)\n\n  // Manual run loading\n  let runningId = $state<string | null>(null)\n\n  onMount(async () => {\n    
await Promise.all([loadSchedules(), loadSavedQueries()])\n  })\n\n  const savedQueryMap = $derived.by(() => {\n    const map = new Map<string, SavedQuery>()\n    for (const q of savedQueries) map.set(q.id, q)\n    return map\n  })\n\n  async function loadSchedules() {\n    loading = true\n    try {\n      const res = await apiGet<{ schedules: Schedule[] }>('/api/schedules')\n      schedules = res.schedules ?? []\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      loading = false\n    }\n  }\n\n  async function loadSavedQueries() {\n    savedQueriesLoading = true\n    try {\n      const res = await apiGet<{ saved_queries: SavedQuery[] }>('/api/saved-queries')\n      savedQueries = res.saved_queries ?? []\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      savedQueriesLoading = false\n    }\n  }\n\n  function openCreateModal() {\n    editingId = null\n    formName = ''\n    formSavedQueryId = ''\n    formCron = ''\n    formTimezone = 'UTC'\n    formTimeout = 60000\n    void loadSavedQueries()\n    showModal = true\n  }\n\n  function openEditModal(s: Schedule) {\n    editingId = s.id\n    formName = s.name\n    formSavedQueryId = s.saved_query_id\n    formCron = s.cron\n    formTimezone = s.timezone\n    formTimeout = s.timeout_ms\n    showModal = true\n  }\n\n  async function saveSchedule() {\n    if (!formName.trim() || !formCron.trim()) {\n      toastError('Name and cron expression are required')\n      return\n    }\n    saving = true\n    try {\n      if (editingId) {\n        await apiPut(`/api/schedules/${editingId}`, {\n          name: formName.trim(),\n          cron: formCron.trim(),\n          timezone: formTimezone,\n          timeout_ms: formTimeout,\n        })\n        toastSuccess('Schedule updated')\n      } else {\n        if (!formSavedQueryId) {\n          toastError('Please select a saved query')\n          saving = false\n          return\n        }\n        await apiPost('/api/schedules', {\n     
     name: formName.trim(),\n          saved_query_id: formSavedQueryId,\n          cron: formCron.trim(),\n          timezone: formTimezone,\n          timeout_ms: formTimeout,\n        })\n        toastSuccess('Schedule created')\n      }\n      showModal = false\n      await loadSchedules()\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      saving = false\n    }\n  }\n\n  function askDeleteSchedule(id: string) {\n    pendingDeleteId = id\n    confirmOpen = true\n  }\n\n  async function performDeleteSchedule(id: string) {\n    try {\n      await apiDel(`/api/schedules/${id}`)\n      schedules = schedules.filter(s => s.id !== id)\n      if (expandedSchedule === id) {\n        expandedSchedule = null\n        runs = []\n        runsOffset = 0\n        runsHasMore = false\n      }\n      toastSuccess('Schedule deleted')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function confirmDeleteSchedule() {\n    if (!pendingDeleteId) return\n    confirmLoading = true\n    try {\n      await performDeleteSchedule(pendingDeleteId)\n      confirmOpen = false\n      pendingDeleteId = null\n    } finally {\n      confirmLoading = false\n    }\n  }\n\n  async function toggleEnabled(s: Schedule) {\n    try {\n      await apiPut(`/api/schedules/${s.id}`, { enabled: !s.enabled })\n      schedules = schedules.map(sc => sc.id === s.id ? { ...sc, enabled: !sc.enabled } : sc)\n      toastSuccess(s.enabled ? 'Schedule disabled' : 'Schedule enabled')\n    } catch (e: any) {\n      toastError(e.message)\n    }\n  }\n\n  async function manualRun(id: string) {\n    runningId = id\n    try {\n      const res = await apiPost<{ success: boolean; error?: string }>(`/api/schedules/${id}/run`)\n      if (res.success) {\n        toastSuccess('Manual run completed')\n      } else {\n        toastError(res.error ?? 
'Run failed')\n      }\n      await loadSchedules()\n      if (expandedSchedule === id) await loadRuns(id)\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      runningId = null\n    }\n  }\n\n  async function loadRuns(id: string, append = false) {\n    const offset = append ? runsOffset : 0\n    if (append) {\n      runsLoadingMore = true\n    } else {\n      runsLoading = true\n      runs = []\n      runsOffset = 0\n      runsHasMore = false\n    }\n    try {\n      const res = await apiGet<{ runs: ScheduleRun[]; has_more?: boolean; next_offset?: number }>(\n        `/api/schedules/${id}/runs?limit=${RUNS_PAGE_SIZE}&offset=${offset}`,\n      )\n      if (expandedSchedule !== id) return\n      const incoming = res.runs ?? []\n      runs = append ? [...runs, ...incoming] : incoming\n      runsHasMore = !!res.has_more\n      runsOffset = typeof res.next_offset === 'number' ? res.next_offset : offset + incoming.length\n    } catch (e: any) {\n      toastError(e.message)\n    } finally {\n      if (append) {\n        runsLoadingMore = false\n      } else {\n        runsLoading = false\n      }\n    }\n  }\n\n  async function toggleRuns(id: string) {\n    if (expandedSchedule === id) {\n      expandedSchedule = null\n      runs = []\n      runsOffset = 0\n      runsHasMore = false\n      runsLoadingMore = false\n      return\n    }\n    expandedSchedule = id\n    await loadRuns(id, false)\n  }\n\n  async function openRunDetails(schedule: Schedule, run: ScheduleRun) {\n    if (savedQueries.length === 0) await loadSavedQueries()\n    selectedSchedule = schedule\n    selectedRun = run\n    showRunSheet = true\n  }\n\n  function openScheduleQueryInEditor() {\n    if (!selectedSchedule) return\n    const q = savedQueryMap.get(selectedSchedule.saved_query_id)\n    if (!q) return\n    openSavedQueryTab(q)\n    showRunSheet = false\n  }\n\n  function formatTime(ts: string | null): string {\n    if (!ts) return '—'\n    try {\n      return new 
Date(ts).toLocaleString()\n    } catch {\n      return ts\n    }\n  }\n\n  function statusBadge(status: string | null): { cls: string; label: string } {\n    switch (status) {\n      case 'success': return { cls: 'bg-emerald-100 dark:bg-emerald-900/30 text-emerald-700 dark:text-emerald-300', label: 'Success' }\n      case 'error': return { cls: 'bg-red-100 dark:bg-red-900/30 text-red-700 dark:text-red-300', label: 'Error' }\n      case 'running': return { cls: 'bg-orange-100 dark:bg-orange-900/30 text-orange-700 dark:text-orange-300', label: 'Running' }\n      default: return { cls: 'bg-gray-100 dark:bg-gray-800 text-gray-500', label: status ?? 'Pending' }\n    }\n  }\n</script>\n\n<div class=\"flex flex-col h-full\">\n  <div class=\"ds-page-header\">\n    <div class=\"flex items-center gap-3\">\n      <Clock size={18} class=\"text-ch-orange\" />\n      <h1 class=\"ds-page-title\">Scheduled Queries</h1>\n    </div>\n    <Button size=\"sm\" onclick={openCreateModal}>\n      <Plus size={14} /> Create Schedule\n    </Button>\n  </div>\n\n  <div class=\"flex-1 overflow-auto p-4\">\n    {#if loading}\n      <div class=\"flex items-center justify-center py-12\"><Spinner /></div>\n    {:else if schedules.length === 0}\n      <div class=\"ds-empty text-gray-500\">\n        <Clock size={36} class=\"mx-auto mb-2 text-gray-300 dark:text-gray-700\" />\n        <p class=\"mb-1\">No scheduled queries yet</p>\n        <p class=\"text-xs text-gray-400 dark:text-gray-600\">Create a schedule to run saved queries automatically</p>\n      </div>\n    {:else}\n      <div class=\"flex flex-col gap-2\">\n        {#each schedules as schedule (schedule.id)}\n          {@const badge = statusBadge(schedule.last_status)}\n          {@const queryRef = savedQueryMap.get(schedule.saved_query_id)}\n          <div class=\"ds-panel rounded-xl overflow-hidden\">\n            <div class=\"flex items-center gap-3 p-3 border-b border-gray-200/70 dark:border-gray-800/70\">\n              <button\n       
         class=\"text-gray-400 hover:text-gray-600 dark:hover:text-gray-300\"\n                onclick={() => toggleRuns(schedule.id)}\n                title=\"View runs\"\n              >\n                {#if expandedSchedule === schedule.id}<ChevronDown size={14} />{:else}<ChevronRight size={14} />{/if}\n              </button>\n\n              <div class=\"flex-1 min-w-0\">\n                <div class=\"flex items-center gap-2 flex-wrap\">\n                  <span class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">{schedule.name}</span>\n                  <code class=\"ds-badge ds-badge-neutral font-mono\">{schedule.cron}</code>\n                  <span class=\"text-xs text-gray-400\">{schedule.timezone}</span>\n                </div>\n                <div class=\"flex items-center gap-3 mt-1 text-xs text-gray-500 flex-wrap\">\n                  <span class=\"px-1.5 py-0.5 rounded {badge.cls}\">{badge.label}</span>\n                  <span>Last: {formatTime(schedule.last_run_at)}</span>\n                  <span>Next: {formatTime(schedule.next_run_at)}</span>\n                  {#if queryRef}\n                    <span class=\"text-gray-400\">{queryRef.name}</span>\n                  {/if}\n                </div>\n              </div>\n\n              <button\n                class=\"relative w-9 h-5 rounded-full transition-colors {schedule.enabled ? 'bg-ch-blue' : 'bg-gray-300 dark:bg-gray-700'}\"\n                onclick={() => toggleEnabled(schedule)}\n                title={schedule.enabled ? 'Disable' : 'Enable'}\n              >\n                <span class=\"absolute top-0.5 left-0.5 w-4 h-4 rounded-full bg-white transition-transform {schedule.enabled ? 
'translate-x-4' : ''}\"></span>\n              </button>\n\n              <div class=\"flex items-center gap-1\">\n                <button\n                  class=\"ds-icon-btn disabled:opacity-50\"\n                  onclick={() => manualRun(schedule.id)}\n                  disabled={runningId === schedule.id}\n                  title=\"Run now\"\n                >\n                  {#if runningId === schedule.id}<Spinner size=\"sm\" />{:else}<Play size={14} />{/if}\n                </button>\n                <button\n                  class=\"ds-icon-btn hover:text-gray-700 dark:hover:text-gray-300\"\n                  onclick={() => openEditModal(schedule)}\n                  title=\"Edit\"\n                >\n                  <Clock size={14} />\n                </button>\n                <button\n                  class=\"ds-icon-btn hover:text-red-500 dark:hover:text-red-400\"\n                  onclick={() => askDeleteSchedule(schedule.id)}\n                  title=\"Delete\"\n                >\n                  <Trash2 size={14} />\n                </button>\n              </div>\n            </div>\n\n            {#if expandedSchedule === schedule.id}\n              <div class=\"px-3 py-3\">\n                {#if runsLoading}\n                  <div class=\"flex items-center justify-center py-4\"><Spinner size=\"sm\" /></div>\n                {:else if runs.length === 0}\n                  <p class=\"text-xs text-gray-500 py-2\">No runs yet</p>\n                {:else}\n                  <div class=\"overflow-x-auto\">\n                    <table class=\"ds-table text-xs\">\n                      <thead>\n                        <tr class=\"ds-table-head-row\">\n                          <th class=\"ds-table-th-compact\">Started</th>\n                          <th class=\"ds-table-th-compact\">Status</th>\n                          <th class=\"ds-table-th-right-compact\">Elapsed</th>\n                          <th 
class=\"ds-table-th-right-compact\">Rows</th>\n                          <th class=\"ds-table-th-compact\">Error</th>\n                          <th class=\"ds-table-th-right-compact\">Details</th>\n                        </tr>\n                      </thead>\n                      <tbody>\n                        {#each runs as run}\n                          {@const rb = statusBadge(run.status)}\n                          <tr class=\"ds-table-row\">\n                            <td class=\"ds-td-compact\">{formatTime(run.started_at)}</td>\n                            <td class=\"ds-td-compact\"><span class=\"ds-badge {rb.cls}\">{rb.label}</span></td>\n                            <td class=\"ds-td-compact text-right\">{run.elapsed_ms}ms</td>\n                            <td class=\"ds-td-compact text-right\">{run.rows_affected}</td>\n                            <td class=\"ds-td-compact text-red-500 max-w-xs truncate\">{run.error ?? '—'}</td>\n                            <td class=\"ds-td-compact text-right\">\n                              <button\n                                class=\"ds-btn-outline px-2 py-1\"\n                                onclick={() => openRunDetails(schedule, run)}\n                              >\n                                <FileText size={12} /> View\n                              </button>\n                            </td>\n                          </tr>\n                        {/each}\n                      </tbody>\n                    </table>\n                  </div>\n                  {#if runsHasMore}\n                    <div class=\"mt-2 flex justify-center\">\n                      <button\n                        class=\"ds-btn-outline px-3 py-1.5 disabled:opacity-60\"\n                        onclick={() => loadRuns(schedule.id, true)}\n                        disabled={runsLoadingMore}\n                      >\n                        {#if runsLoadingMore}<Spinner size=\"sm\" />{:else}Load {RUNS_PAGE_SIZE} 
more{/if}\n                      </button>\n                    </div>\n                  {/if}\n                {/if}\n              </div>\n            {/if}\n          </div>\n        {/each}\n      </div>\n    {/if}\n  </div>\n</div>\n\n<Sheet open={showModal} title={editingId ? 'Edit Schedule' : 'Create Schedule'} size=\"sm\" onclose={() => showModal = false}>\n  <div class=\"flex flex-col gap-3\">\n    <div>\n      <label for=\"schedule-name\" class=\"ds-form-label\">Name</label>\n      <input\n        id=\"schedule-name\"\n        type=\"text\"\n        class=\"ds-input\"\n        placeholder=\"e.g. Daily Aggregation\"\n        bind:value={formName}\n      />\n    </div>\n\n    {#if !editingId}\n      <div>\n        <p class=\"ds-form-label\">Saved Query</p>\n        <Combobox\n          options={savedQueries.map((q) => ({\n            value: q.id,\n            label: q.name,\n            hint: q.description || q.query,\n            keywords: `${q.name} ${q.description ?? ''} ${q.query}`,\n          }))}\n          value={formSavedQueryId}\n          emptyText={savedQueriesLoading ? 'Loading saved queries...' : 'No saved queries found'}\n          placeholder=\"Select a saved query...\"\n          disabled={savedQueriesLoading || savedQueries.length === 0}\n          onChange={(id) => formSavedQueryId = id}\n        />\n        {#if savedQueries.length === 0}\n          <p class=\"mt-1 text-xs text-amber-500\">No saved queries available. Create one first in Saved Queries.</p>\n        {/if}\n      </div>\n    {/if}\n\n    <div>\n      <label for=\"schedule-cron\" class=\"ds-form-label\">Cron Expression</label>\n      <input\n        id=\"schedule-cron\"\n        type=\"text\"\n        class=\"ds-input font-mono\"\n        placeholder=\"0 */6 * * *\"\n        bind:value={formCron}\n      />\n      <p class=\"text-xs text-gray-400 mt-1\">e.g. 
<code>0 */6 * * *</code> = every 6 hours</p>\n    </div>\n\n    <div class=\"flex gap-3\">\n      <div class=\"flex-1\">\n        <label for=\"schedule-timezone\" class=\"ds-form-label\">Timezone</label>\n        <input\n          id=\"schedule-timezone\"\n          type=\"text\"\n          class=\"ds-input\"\n          bind:value={formTimezone}\n        />\n      </div>\n      <div class=\"flex-1\">\n        <label for=\"schedule-timeout\" class=\"ds-form-label\">Timeout (ms)</label>\n        <input\n          id=\"schedule-timeout\"\n          type=\"number\"\n          class=\"ds-input\"\n          bind:value={formTimeout}\n        />\n      </div>\n    </div>\n\n    <div class=\"flex justify-end gap-2 pt-2\">\n      <Button variant=\"secondary\" size=\"sm\" onclick={() => showModal = false}>Cancel</Button>\n      <Button size=\"sm\" loading={saving} onclick={saveSchedule} disabled={!editingId && (savedQueriesLoading || savedQueries.length === 0)}>\n        {editingId ? 'Update' : 'Create'}\n      </Button>\n    </div>\n  </div>\n</Sheet>\n\n<Sheet open={showRunSheet} title=\"Schedule Run Details\" size=\"lg\" onclose={() => showRunSheet = false}>\n  {#if selectedRun && selectedSchedule}\n    {@const runBadge = statusBadge(selectedRun.status)}\n    {@const saved = savedQueryMap.get(selectedSchedule.saved_query_id)}\n\n    <div class=\"space-y-5\">\n      <div class=\"grid grid-cols-2 gap-3\">\n        <div class=\"surface-card rounded-lg p-3\">\n          <p class=\"text-xs text-gray-500 mb-1\">Schedule</p>\n          <p class=\"text-sm font-semibold text-gray-800 dark:text-gray-100\">{selectedSchedule.name}</p>\n          <p class=\"text-xs text-gray-500 mt-1 font-mono\">{selectedSchedule.cron} ({selectedSchedule.timezone})</p>\n        </div>\n        <div class=\"surface-card rounded-lg p-3\">\n          <p class=\"text-xs text-gray-500 mb-1\">Run Status</p>\n          <span class=\"inline-flex px-2 py-1 rounded text-xs font-medium 
{runBadge.cls}\">{runBadge.label}</span>\n          <p class=\"text-xs text-gray-500 mt-1\">ID: <span class=\"font-mono\">{selectedRun.id}</span></p>\n        </div>\n      </div>\n\n      <div class=\"grid grid-cols-2 gap-3\">\n        <div class=\"surface-card rounded-lg p-3\">\n          <p class=\"text-xs text-gray-500\">Started</p>\n          <p class=\"text-sm text-gray-800 dark:text-gray-100 mt-1\">{formatTime(selectedRun.started_at)}</p>\n        </div>\n        <div class=\"surface-card rounded-lg p-3\">\n          <p class=\"text-xs text-gray-500\">Finished</p>\n          <p class=\"text-sm text-gray-800 dark:text-gray-100 mt-1\">{formatTime(selectedRun.finished_at)}</p>\n        </div>\n        <div class=\"surface-card rounded-lg p-3\">\n          <p class=\"text-xs text-gray-500\">Elapsed</p>\n          <p class=\"text-sm text-gray-800 dark:text-gray-100 mt-1\">{selectedRun.elapsed_ms} ms</p>\n        </div>\n        <div class=\"surface-card rounded-lg p-3\">\n          <p class=\"text-xs text-gray-500\">Rows Affected</p>\n          <p class=\"text-sm text-gray-800 dark:text-gray-100 mt-1\">{selectedRun.rows_affected}</p>\n        </div>\n      </div>\n\n      {#if selectedRun.error}\n        <div class=\"surface-card rounded-lg p-3 border-red-400/40\">\n          <p class=\"text-xs font-medium text-red-500 mb-1\">Error</p>\n          <pre class=\"text-xs text-red-400 whitespace-pre-wrap break-all font-mono\">{selectedRun.error}</pre>\n        </div>\n      {/if}\n\n      <div class=\"surface-card rounded-lg p-3\">\n        <div class=\"flex items-center justify-between gap-2 mb-2\">\n          <p class=\"text-xs font-medium text-gray-600 dark:text-gray-300\">Saved Query</p>\n          {#if saved}\n            <button class=\"inline-flex items-center gap-1 text-xs text-ch-blue hover:underline\" onclick={openScheduleQueryInEditor}>\n              <ExternalLink size={12} /> Open in editor\n            </button>\n          {/if}\n        </div>\n        
{#if saved}\n          <p class=\"text-sm text-gray-800 dark:text-gray-100 mb-2\">{saved.name}</p>\n          <pre class=\"text-xs font-mono text-gray-600 dark:text-gray-300 bg-gray-100 dark:bg-gray-900 rounded p-2 overflow-auto max-h-48\">{saved.query}</pre>\n        {:else}\n          <p class=\"text-xs text-gray-500\">Saved query metadata not available.</p>\n        {/if}\n      </div>\n    </div>\n  {/if}\n</Sheet>\n\n<ConfirmDialog\n  open={confirmOpen}\n  title=\"Delete schedule?\"\n  description=\"This schedule and its run history will be removed permanently.\"\n  confirmLabel=\"Delete\"\n  destructive={true}\n  loading={confirmLoading}\n  onconfirm={confirmDeleteSchedule}\n  oncancel={() => confirmOpen = false}\n/>\n"
  },
  {
    "path": "ui/src/pages/Settings.svelte",
    "content": "<script lang=\"ts\">\n  import { onMount } from 'svelte'\n  import { apiGet, apiPost } from '../lib/api/client'\n  import type { LicenseInfo } from '../lib/types/api'\n  import { success, error as toastError } from '../lib/stores/toast.svelte'\n  import { getSession } from '../lib/stores/session.svelte'\n  import {\n    Settings as SettingsIcon,\n    Shield,\n    ShieldCheck,\n    ShieldAlert,\n    Upload,\n    X,\n    Sparkles,\n    FileText,\n    ExternalLink,\n    Building2,\n    Scale,\n    KeyRound,\n  } from 'lucide-svelte'\n  import logo from '../assets/logo.png'\n\n  let license = $state<LicenseInfo | null>(null)\n  let loading = $state(true)\n  let activating = $state(false)\n  let deactivating = $state(false)\n  let showConfirmDeactivate = $state(false)\n  let licenseInput = $state('')\n  let inputMode = $state<'paste' | 'idle'>('idle')\n  let fileInput = $state<HTMLInputElement | null>(null)\n  type SettingsTab = 'license' | 'instance' | 'legal'\n  const settingsTabItems: Array<{ id: SettingsTab; label: string }> = [\n    { id: 'license', label: 'License' },\n    { id: 'instance', label: 'Instance' },\n    { id: 'legal', label: 'Legal' },\n  ]\n  let activeTab = $state<SettingsTab>('license')\n\n  const session = $derived(getSession())\n\n  const legalScopes = [\n    {\n      title: 'CH-UI Core',\n      badge: 'Apache-2.0',\n      description:\n        'Community features and open-source code are licensed under Apache License 2.0.',\n    },\n    {\n      title: 'CH-UI Pro Modules',\n      badge: 'Proprietary',\n      description:\n        'Commercial Pro capabilities are not Apache-2.0 and require a signed commercial agreement + valid license.',\n    },\n    {\n      title: 'License Boundary',\n      badge: 'Scope-separated',\n      description:\n        'Apache terms apply to Core. 
Commercial terms apply to Pro-only modules and entitlements.',\n    },\n  ]\n\n  const proActive = $derived(!!(license?.valid && license?.edition?.toLowerCase() === 'pro'))\n  const expiredLicense = $derived(!!(license && !license.valid && !!license.license_id))\n\n  const licenseState = $derived.by(() => {\n    if (loading) return 'loading'\n    if (proActive) return 'pro'\n    if (expiredLicense) return 'expired'\n    return 'community'\n  })\n\n  async function loadLicense() {\n    try {\n      const res = await apiGet<LicenseInfo>('/api/license')\n      license = res\n    } catch {\n      license = null\n    } finally {\n      loading = false\n    }\n  }\n\n  function normalizeSettingsTab(value: string | null | undefined): SettingsTab {\n    const raw = (value ?? '').trim().toLowerCase()\n    if (raw === 'access') return 'license'\n    if (raw === 'overview') return 'license'\n    if (raw === 'licensing') return 'license'\n    if (raw === 'brand') return 'instance'\n    if ((settingsTabItems.map((item) => item.id) as string[]).includes(raw)) return raw as SettingsTab\n    return 'license'\n  }\n\n  function syncSettingsTabParam(tab: SettingsTab) {\n    if (typeof window === 'undefined') return\n    const url = new URL(window.location.href)\n    if (url.searchParams.get('tab') === tab) return\n    url.searchParams.set('tab', tab)\n    history.replaceState(null, '', `${url.pathname}?${url.searchParams.toString()}`)\n  }\n\n  function switchTab(tab: SettingsTab, syncUrl = true) {\n    activeTab = tab\n    if (syncUrl) syncSettingsTabParam(tab)\n  }\n\n  onMount(() => {\n    const initialTab = normalizeSettingsTab(\n      typeof window === 'undefined' ? 
null : new URLSearchParams(window.location.search).get('tab'),\n    )\n    switchTab(initialTab, true)\n    void loadLicense()\n  })\n\n  async function activate() {\n    const text = licenseInput.trim()\n    if (!text) return\n\n    activating = true\n    try {\n      const res = await apiPost<LicenseInfo>('/api/license/activate', { license: text })\n      license = res\n      licenseInput = ''\n      inputMode = 'idle'\n      success('License activated successfully')\n    } catch (e: any) {\n      toastError(e.message || 'Failed to activate license')\n    } finally {\n      activating = false\n    }\n  }\n\n  async function deactivate() {\n    deactivating = true\n    try {\n      const res = await apiPost<LicenseInfo>('/api/license/deactivate')\n      license = res\n      showConfirmDeactivate = false\n      success('License deactivated')\n    } catch (e: any) {\n      toastError(e.message || 'Failed to deactivate license')\n    } finally {\n      deactivating = false\n    }\n  }\n\n  function handleFileUpload(e: Event) {\n    const target = e.target as HTMLInputElement\n    const file = target.files?.[0]\n    if (!file) return\n    const reader = new FileReader()\n    reader.onload = () => {\n      licenseInput = reader.result as string\n      inputMode = 'paste'\n    }\n    reader.readAsText(file)\n    target.value = ''\n  }\n\n  function formatDate(date: string | undefined) {\n    if (!date) return '—'\n    return new Date(date).toLocaleDateString()\n  }\n\n  function openFilePicker() {\n    fileInput?.click()\n  }\n</script>\n\n<div class=\"h-full overflow-auto\">\n  <div class=\"max-w-7xl mx-auto p-6 space-y-4\">\n    <section class=\"ds-panel overflow-hidden\">\n      <div class=\"px-5 py-4 border-b border-orange-300/25 dark:border-orange-700/35 bg-gradient-to-r from-orange-100/60 via-transparent to-transparent dark:from-orange-500/10 dark:via-transparent\">\n        <div class=\"flex flex-col gap-4 lg:flex-row lg:items-center lg:justify-between\">\n       
   <div class=\"flex items-center gap-4 min-w-0\">\n            <div class=\"h-16 w-16 rounded-2xl border border-orange-300/40 dark:border-orange-700/40 bg-gray-100 dark:bg-gray-900 grid place-items-center overflow-hidden shrink-0\">\n              <img src={logo} alt=\"CH-UI logo\" class=\"h-12 w-12 object-contain\" />\n            </div>\n            <div class=\"min-w-0\">\n              <div class=\"flex items-center gap-2 text-gray-900 dark:text-gray-100\">\n                <SettingsIcon size={18} class=\"text-ch-orange\" />\n                <h1 class=\"text-2xl font-semibold leading-tight\">CH-UI License</h1>\n              </div>\n              <p class=\"mt-1 text-sm text-gray-600 dark:text-gray-400\">Identity, licensing, entitlements, and legal scope controls</p>\n            </div>\n          </div>\n\n          <div class=\"flex items-center gap-2 flex-wrap\">\n            {#if licenseState === 'loading'}\n              <span class=\"ds-badge ds-badge-neutral\">Checking license...</span>\n            {:else if licenseState === 'pro'}\n              <span class=\"ds-badge ds-badge-success\">Pro Active</span>\n            {:else if licenseState === 'expired'}\n              <span class=\"ds-badge ds-badge-danger\">Pro Expired</span>\n            {:else}\n              <span class=\"ds-badge ds-badge-neutral\">Community Edition</span>\n            {/if}\n            <span class=\"ds-badge ds-badge-brand uppercase\">{license?.edition || 'community'}</span>\n          </div>\n        </div>\n      </div>\n\n      <div class=\"grid grid-cols-1 sm:grid-cols-2 xl:grid-cols-4 gap-3 p-5\">\n        <div class=\"ds-panel-muted px-3 py-2\">\n          <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Edition</p>\n          <p class=\"mt-1 text-sm font-semibold text-gray-900 dark:text-gray-100\">{license?.edition || 'community'}</p>\n        </div>\n        <div class=\"ds-panel-muted px-3 py-2\">\n          <p class=\"text-[11px] 
uppercase tracking-wide text-gray-500 dark:text-gray-400\">Customer</p>\n          <p class=\"mt-1 text-sm font-semibold text-gray-900 dark:text-gray-100\">{license?.customer || 'Open Source Deployment'}</p>\n        </div>\n        <div class=\"ds-panel-muted px-3 py-2\">\n          <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">License ID</p>\n          <p class=\"mt-1 text-sm font-semibold text-gray-900 dark:text-gray-100 truncate\">{license?.license_id || '—'}</p>\n        </div>\n        <div class=\"ds-panel-muted px-3 py-2\">\n          <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Expiration</p>\n          <p class=\"mt-1 text-sm font-semibold text-gray-900 dark:text-gray-100\">{formatDate(license?.expires_at)}</p>\n        </div>\n      </div>\n    </section>\n\n    <div class=\"ds-panel p-2\">\n      <nav class=\"ds-tabs border-0 px-1 pt-0 gap-1 overflow-x-auto whitespace-nowrap\" aria-label=\"License Tabs\">\n        {#each settingsTabItems as item}\n          <button\n            type=\"button\"\n            class=\"ds-tab {activeTab === item.id ? 
'ds-tab-active' : ''}\"\n            onclick={() => switchTab(item.id)}\n          >\n            {item.label}\n          </button>\n        {/each}\n      </nav>\n    </div>\n\n    <div class=\"grid grid-cols-1 xl:grid-cols-[1.55fr_1fr] gap-4\">\n      {#if activeTab === 'license'}\n      <section class=\"ds-panel p-5 space-y-4\">\n        <div class=\"flex items-center gap-2\">\n          <Sparkles size={16} class=\"text-ch-orange\" />\n          <h2 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 uppercase tracking-wide\">License Control</h2>\n        </div>\n\n        {#if loading}\n          <div class=\"flex items-center gap-2 text-sm text-gray-500 dark:text-gray-400\">\n            <div class=\"w-4 h-4 border-2 border-gray-300 border-t-transparent rounded-full animate-spin\"></div>\n            Loading license status...\n          </div>\n        {:else if proActive}\n          <div class=\"space-y-4\">\n            <div class=\"flex items-center gap-2\">\n              <ShieldCheck size={18} class=\"text-emerald-500\" />\n              <span class=\"ds-badge ds-badge-success\">Pro License Active</span>\n              <span class=\"text-xs text-gray-500 dark:text-gray-400\">ID: {license?.license_id || '—'}</span>\n            </div>\n\n            <div class=\"grid grid-cols-1 sm:grid-cols-2 gap-3\">\n              <div class=\"ds-panel-muted p-3\">\n                <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Customer</p>\n                <p class=\"text-sm font-medium text-gray-900 dark:text-gray-100 mt-1\">{license?.customer || '—'}</p>\n              </div>\n              <div class=\"ds-panel-muted p-3\">\n                <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Expires</p>\n                <p class=\"text-sm font-medium text-gray-900 dark:text-gray-100 mt-1\">{formatDate(license?.expires_at)}</p>\n              </div>\n            </div>\n\n            <div 
class=\"pt-2 border-t border-gray-200 dark:border-gray-800\">\n              {#if showConfirmDeactivate}\n                <div class=\"ds-panel-muted p-3 border-red-400/35\">\n                  <p class=\"text-sm text-red-500\">Deactivate this Pro license and downgrade to Community Edition?</p>\n                  <div class=\"flex items-center gap-2 flex-wrap mt-2\">\n                    <button\n                      onclick={deactivate}\n                      disabled={deactivating}\n                      class=\"ds-btn-primary px-3 py-1 border-red-500 bg-red-600 hover:bg-red-700 disabled:opacity-50\"\n                    >\n                      {deactivating ? 'Deactivating...' : 'Confirm Deactivate'}\n                    </button>\n                    <button\n                      onclick={() => showConfirmDeactivate = false}\n                      class=\"ds-btn-outline px-3 py-1\"\n                    >\n                      Cancel\n                    </button>\n                  </div>\n                </div>\n              {:else}\n                <button\n                  onclick={() => showConfirmDeactivate = true}\n                  class=\"text-xs text-red-500 hover:text-red-400\"\n                >\n                  Deactivate License\n                </button>\n              {/if}\n            </div>\n          </div>\n        {:else if expiredLicense}\n          <div class=\"space-y-3\">\n            <div class=\"flex items-center gap-2\">\n              <ShieldAlert size={18} class=\"text-red-500\" />\n              <span class=\"ds-badge ds-badge-danger\">License Expired</span>\n            </div>\n            <div class=\"text-sm text-gray-500 dark:text-gray-400\">Customer: {license?.customer || '—'}</div>\n            <div class=\"text-sm text-red-500\">Expired on {formatDate(license?.expires_at)}</div>\n            <p class=\"text-sm text-gray-500 dark:text-gray-400\">Activate a new Pro license to restore proprietary features.</p>\n        
  </div>\n        {:else}\n          <div class=\"space-y-3\">\n            <div class=\"flex items-center gap-2\">\n              <Shield size={18} class=\"text-gray-400\" />\n              <span class=\"ds-badge ds-badge-neutral\">Community Edition</span>\n            </div>\n            <p class=\"text-sm text-gray-500 dark:text-gray-400\">\n              Core capabilities are enabled under Apache-2.0. Activate Pro to unlock proprietary modules.\n            </p>\n          </div>\n        {/if}\n\n        {#if !proActive}\n          <div class=\"pt-3 border-t border-gray-200 dark:border-gray-800 space-y-3\">\n            <h3 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200\">Activate Pro License</h3>\n\n            {#if inputMode === 'idle'}\n              <div class=\"flex items-center gap-2 flex-wrap\">\n                <button\n                  onclick={() => inputMode = 'paste'}\n                  class=\"ds-btn-primary px-4 py-2\"\n                >\n                  Paste License JSON\n                </button>\n                <button\n                  onclick={openFilePicker}\n                  class=\"ds-btn-outline px-4 py-2\"\n                >\n                  <Upload size={14} /> Upload License File\n                </button>\n              </div>\n            {:else}\n              <div class=\"space-y-3\">\n                <div class=\"relative\">\n                  <textarea\n                    bind:value={licenseInput}\n                    placeholder=\"Paste signed license JSON here...\"\n                    rows={8}\n                    class=\"ds-textarea text-xs font-mono resize-y\"\n                  ></textarea>\n                  <button\n                    onclick={() => { licenseInput = ''; inputMode = 'idle' }}\n                    class=\"absolute top-2 right-2 text-gray-400 hover:text-gray-200\"\n                    title=\"Cancel\"\n                  >\n                    <X size={14} />\n                  
</button>\n                </div>\n                <div class=\"flex items-center gap-2 flex-wrap\">\n                  <button\n                    onclick={activate}\n                    disabled={activating || !licenseInput.trim()}\n                    class=\"ds-btn-primary px-4 py-2 disabled:opacity-50 disabled:cursor-not-allowed\"\n                  >\n                    {activating ? 'Activating...' : 'Activate License'}\n                  </button>\n                  <button\n                    onclick={openFilePicker}\n                    class=\"ds-btn-outline px-4 py-2\"\n                  >\n                    <Upload size={14} /> Replace from File\n                  </button>\n                </div>\n              </div>\n            {/if}\n          </div>\n        {/if}\n\n        <input\n          bind:this={fileInput}\n          type=\"file\"\n          accept=\".json,application/json\"\n          onchange={handleFileUpload}\n          class=\"hidden\"\n        />\n      </section>\n      {/if}\n\n      {#if activeTab === 'license' || activeTab === 'instance' || activeTab === 'legal'}\n      <aside class=\"space-y-4\">\n        {#if activeTab === 'license' || activeTab === 'instance'}\n        <section class=\"ds-panel p-5\">\n          <div class=\"flex items-center gap-2 mb-3\">\n            <Building2 size={16} class=\"text-ch-orange\" />\n            <h2 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 uppercase tracking-wide\">Brand & Instance</h2>\n          </div>\n\n          <div class=\"flex items-center gap-3\">\n            <img src={logo} alt=\"CH-UI mark\" class=\"h-12 w-12 rounded-xl border border-orange-300/35 dark:border-orange-700/35 bg-gray-100 dark:bg-gray-900 p-1.5\" />\n            <div>\n              <p class=\"text-sm font-semibold text-gray-900 dark:text-gray-100\">CH-UI</p>\n              <p class=\"text-xs text-gray-500 dark:text-gray-400\">ClickHouse operations and analytics control surface</p>\n        
    </div>\n          </div>\n\n          <div class=\"grid grid-cols-1 sm:grid-cols-2 gap-2 mt-3\">\n            <div class=\"ds-panel-muted p-2.5\">\n              <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Connected As</p>\n              <p class=\"text-xs font-medium text-gray-800 dark:text-gray-200 mt-1\">{session?.user || '—'}</p>\n            </div>\n            <div class=\"ds-panel-muted p-2.5\">\n              <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Role</p>\n              <p class=\"text-xs font-medium text-gray-800 dark:text-gray-200 mt-1 uppercase\">{session?.role || '—'}</p>\n            </div>\n            <div class=\"ds-panel-muted p-2.5\">\n              <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Connection</p>\n              <p class=\"text-xs font-medium text-gray-800 dark:text-gray-200 mt-1\">{session?.connectionName || '—'}</p>\n            </div>\n            <div class=\"ds-panel-muted p-2.5\">\n              <p class=\"text-[11px] uppercase tracking-wide text-gray-500 dark:text-gray-400\">Version</p>\n              <p class=\"text-xs font-medium text-gray-800 dark:text-gray-200 mt-1\">{session?.version || '—'}</p>\n            </div>\n          </div>\n        </section>\n        {/if}\n\n        {#if activeTab === 'license' || activeTab === 'legal'}\n        <section class=\"ds-panel p-5\">\n          <div class=\"flex items-center gap-2 mb-3\">\n            <Scale size={16} class=\"text-ch-orange\" />\n            <h2 class=\"text-sm font-semibold text-gray-800 dark:text-gray-200 uppercase tracking-wide\">Legal Scope</h2>\n          </div>\n\n          <div class=\"space-y-2\">\n            {#each legalScopes as item}\n              <div class=\"ds-panel-muted p-3\">\n                <div class=\"flex items-center justify-between gap-2 mb-1.5\">\n                  <p class=\"text-sm font-semibold text-gray-900 
dark:text-gray-100\">{item.title}</p>\n                  <span class=\"ds-badge {item.badge === 'Apache-2.0' ? 'ds-badge-success' : item.badge === 'Proprietary' ? 'ds-badge-danger' : 'ds-badge-brand'}\">{item.badge}</span>\n                </div>\n                <p class=\"text-xs text-gray-500 dark:text-gray-400 leading-relaxed\">{item.description}</p>\n              </div>\n            {/each}\n          </div>\n\n          <div class=\"mt-3 flex items-center gap-2 flex-wrap\">\n            <a\n              href=\"https://www.apache.org/licenses/LICENSE-2.0\"\n              target=\"_blank\"\n              rel=\"noreferrer\"\n              class=\"ds-btn-outline px-3 py-1\"\n            >\n              <FileText size={12} /> Apache-2.0 Text <ExternalLink size={12} />\n            </a>\n            <a\n              href=\"https://github.com/caioricciuti/ch-ui/blob/main/docs/license.md\"\n              target=\"_blank\"\n              rel=\"noreferrer\"\n              class=\"ds-btn-outline px-3 py-1\"\n            >\n              <KeyRound size={12} /> CH-UI License Policy <ExternalLink size={12} />\n            </a>\n            <a\n              href=\"https://github.com/caioricciuti/ch-ui/blob/main/docs/legal/terms-of-service.md\"\n              target=\"_blank\"\n              rel=\"noreferrer\"\n              class=\"ds-btn-outline px-3 py-1\"\n            >\n              Terms <ExternalLink size={12} />\n            </a>\n            <a\n              href=\"https://github.com/caioricciuti/ch-ui/blob/main/docs/legal/privacy-policy.md\"\n              target=\"_blank\"\n              rel=\"noreferrer\"\n              class=\"ds-btn-outline px-3 py-1\"\n            >\n              Privacy <ExternalLink size={12} />\n            </a>\n          </div>\n        </section>\n        {/if}\n      </aside>\n      {/if}\n    </div>\n\n  </div>\n</div>\n"
  },
  {
    "path": "ui/svelte.config.js",
    "content": "import { vitePreprocess } from '@sveltejs/vite-plugin-svelte'\n\n/** @type {import(\"@sveltejs/vite-plugin-svelte\").SvelteConfig} */\nexport default {\n  // Consult https://svelte.dev/docs#compile-time-svelte-preprocess\n  // for more information about preprocessors\n  preprocess: vitePreprocess(),\n}\n"
  },
  {
    "path": "ui/tsconfig.app.json",
    "content": "{\n  \"extends\": \"@tsconfig/svelte/tsconfig.json\",\n  \"compilerOptions\": {\n    \"tsBuildInfoFile\": \"./node_modules/.tmp/tsconfig.app.tsbuildinfo\",\n    \"target\": \"ES2022\",\n    \"useDefineForClassFields\": true,\n    \"module\": \"ESNext\",\n    \"types\": [\"svelte\", \"vite/client\"],\n    \"composite\": true,\n    \"emitDeclarationOnly\": true,\n    /**\n     * Typecheck JS in `.svelte` and `.js` files by default.\n     * Disable checkJs if you'd like to use dynamic types in JS.\n     * Note that setting allowJs false does not prevent the use\n     * of JS in `.svelte` files.\n     */\n    \"allowJs\": true,\n    \"checkJs\": true,\n    \"moduleDetection\": \"force\"\n  },\n  \"include\": [\"src/**/*.ts\", \"src/**/*.js\", \"src/**/*.svelte\"]\n}\n"
  },
  {
    "path": "ui/tsconfig.json",
    "content": "{\n  \"files\": [],\n  \"references\": [\n    { \"path\": \"./tsconfig.app.json\" },\n    { \"path\": \"./tsconfig.node.json\" }\n  ]\n}\n"
  },
  {
    "path": "ui/tsconfig.node.json",
    "content": "{\n  \"compilerOptions\": {\n    \"tsBuildInfoFile\": \"./node_modules/.tmp/tsconfig.node.tsbuildinfo\",\n    \"target\": \"ES2023\",\n    \"lib\": [\"ES2023\"],\n    \"module\": \"ESNext\",\n    \"types\": [\"node\"],\n    \"skipLibCheck\": true,\n\n    /* Bundler mode */\n    \"moduleResolution\": \"bundler\",\n    \"allowImportingTsExtensions\": true,\n    \"verbatimModuleSyntax\": true,\n    \"moduleDetection\": \"force\",\n    \"composite\": true,\n    \"emitDeclarationOnly\": true,\n\n    /* Linting */\n    \"strict\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"erasableSyntaxOnly\": true,\n    \"noFallthroughCasesInSwitch\": true,\n    \"noUncheckedSideEffectImports\": true\n  },\n  \"include\": [\"vite.config.ts\"]\n}\n"
  },
  {
    "path": "ui/vite.config.d.ts",
    "content": "declare const _default: import(\"vite\").UserConfig;\nexport default _default;\n"
  },
  {
    "path": "ui/vite.config.ts",
    "content": "import { defineConfig } from 'vite'\nimport { svelte } from '@sveltejs/vite-plugin-svelte'\nimport tailwindcss from '@tailwindcss/vite'\n\nexport default defineConfig({\n  appType: 'spa',\n  plugins: [svelte(), tailwindcss()],\n  base: process.env.VITE_BASE_PATH ? process.env.VITE_BASE_PATH + '/' : '/',\n  resolve: {\n    dedupe: [\n      '@codemirror/state',\n      '@codemirror/view',\n      '@codemirror/language',\n      '@codemirror/autocomplete',\n      '@codemirror/commands',\n      '@codemirror/search',\n      '@lezer/common',\n      '@lezer/highlight',\n      '@lezer/lr',\n    ],\n  },\n  server: {\n    host: '127.0.0.1',\n    port: 5173,\n    proxy: {\n      '/api': 'http://127.0.0.1:3488',\n      '/connect': { target: 'ws://127.0.0.1:3488', ws: true },\n      '/health': 'http://127.0.0.1:3488',\n      '/install': 'http://127.0.0.1:3488',\n      '/download': 'http://127.0.0.1:3488',\n    },\n  },\n  preview: {\n    host: '127.0.0.1',\n  },\n  build: {\n    target: 'es2022',\n    minify: process.env.CHUI_VITE_MINIFY !== '0',\n    rollupOptions: {\n      output: {\n        manualChunks(id) {\n          if (!id.includes('node_modules')) return undefined\n          if (id.includes('@codemirror') || id.includes('@lezer')) return 'codemirror'\n          if (id.includes('lucide-svelte')) return 'icons'\n          if (id.includes('uplot')) return 'charts'\n          return 'vendor'\n        },\n      },\n    },\n  },\n})\n"
  },
  {
    "path": "ui/vitest.config.ts",
    "content": "import { defineConfig } from 'vitest/config'\n\nexport default defineConfig({\n  test: {\n    environment: 'node',\n    include: ['src/**/*.test.ts'],\n  },\n})\n"
  }
]